From 51d8874a8fdf9488615aab7d80e7597f5b0d2341 Mon Sep 17 00:00:00 2001 From: jensroets Date: Thu, 8 Sep 2022 16:50:02 +0200 Subject: [PATCH 001/686] 94299 Multiple Bitstream deletion endpoint --- .../app/rest/RestResourceController.java | 33 + .../repository/BitstreamRestRepository.java | 44 + .../rest/repository/DSpaceRestRepository.java | 18 + .../app/rest/BitstreamRestRepositoryIT.java | 955 ++++++++++++++++++ 4 files changed, 1050 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java index b82b4830753c..24468660f016 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java @@ -7,6 +7,7 @@ */ package org.dspace.app.rest; +import static org.dspace.app.rest.utils.ContextUtil.obtainContext; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_HEX32; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_STRING_VERSION_STRONG; @@ -55,6 +56,8 @@ import org.dspace.app.rest.utils.RestRepositoryUtils; import org.dspace.app.rest.utils.Utils; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; import org.dspace.util.UUIDUtils; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -1050,6 +1053,13 @@ public ResponseEntity> delete(HttpServletRequest request, return deleteInternal(apiCategory, model, uuid); } + @RequestMapping(method = RequestMethod.DELETE, consumes = {"text/uri-list"}) + public ResponseEntity> delete(HttpServletRequest request, @PathVariable String apiCategory, + @PathVariable String model) + throws HttpRequestMethodNotSupportedException { + return deleteUriListInternal(request, apiCategory, model); + } + /** * Internal method to delete resource. 
* @@ -1067,6 +1077,29 @@ private ResponseEntity> deleteI return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); } + public ResponseEntity> deleteUriListInternal( + HttpServletRequest request, + String apiCategory, + String model) + throws HttpRequestMethodNotSupportedException { + checkModelPluralForm(apiCategory, model); + DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); + Context context = obtainContext(request); + List dsoStringList = utils.getStringListFromRequest(request); + List dsoList = utils.constructDSpaceObjectList(context, dsoStringList); + if (dsoStringList.size() != dsoList.size()) { + throw new ResourceNotFoundException("One or more bitstreams could not be found."); + } + try { + repository.delete(dsoList); + } catch (ClassCastException e) { + log.error("Something went wrong whilst creating the object for apiCategory: " + apiCategory + + " and model: " + model, e); + return ControllerUtils.toEmptyResponse(HttpStatus.INTERNAL_SERVER_ERROR); + } + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + /** * Execute a PUT request for an entity with id of type UUID; * diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index ae3cf91d4c40..f599d993be43 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -10,6 +10,8 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -147,6 +149,48 @@ protected void delete(Context context, UUID id) throws AuthorizeException { } } + @Override + protected void deleteList(Context context, List dsoList) + throws SQLException, AuthorizeException { + // check if list is empty + if (dsoList.isEmpty()) { + throw new ResourceNotFoundException("No bitstreams given."); + } + // check if every DSO is a Bitstream + if (dsoList.stream().anyMatch(dso -> !(dso instanceof Bitstream))) { + throw new UnprocessableEntityException("Not all given items are bitstreams."); + } + // check that they're all part of the same Item + List items = new ArrayList<>(); + for (DSpaceObject dso : dsoList) { + Bitstream bit = bs.find(context, dso.getID()); + DSpaceObject bitstreamParent = bs.getParentObject(context, bit); + if (bit == null) { + throw new ResourceNotFoundException("The bitstream with uuid " + dso.getID() + " could not be found"); + } + // we have to check if the bitstream has already been deleted + if (bit.isDeleted()) { + throw new UnprocessableEntityException("The bitstream with uuid " + bit.getID() + + " was already deleted"); + } else { + items.add(bitstreamParent); + } + } + if (items.stream().distinct().count() > 1) { + throw new UnprocessableEntityException("Not all given items are part of the same Item."); + } + // delete all Bitstreams + Iterator iterator = dsoList.iterator(); + while (iterator.hasNext()) { + Bitstream bit = (Bitstream) iterator.next(); + try { + bs.delete(context, bit); + } catch (SQLException | IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + } + /** * Find the bitstream for the provided handle and sequence or filename. 
* When a bitstream can be found with the sequence ID it will be returned if the user has "METADATA_READ" access. diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java index 01f127eca5ac..219b7c4123b2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java @@ -26,6 +26,7 @@ import org.dspace.app.rest.model.RestAddressableModel; import org.dspace.app.rest.model.patch.Patch; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; import org.dspace.content.service.MetadataFieldService; import org.dspace.core.Context; import org.springframework.beans.factory.BeanNameAware; @@ -256,6 +257,23 @@ public void deleteAll() { } + public void delete(List dsoList) { + Context context = obtainContext(); + try { + getThisRepository().deleteList(context, dsoList); + context.commit(); + } catch (AuthorizeException e) { + throw new RESTAuthorizationException(e); + } catch (SQLException ex) { + throw new RuntimeException(ex.getMessage(), ex); + } + } + + protected void deleteList(Context context, List list) + throws AuthorizeException, SQLException, RepositoryMethodNotImplementedException { + throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); + } + @Override /** * This method cannot be implemented we required all the find method to be paginated diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index f9c1e469fcfe..391d9e419330 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -13,6 +13,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; @@ -1201,6 +1202,960 @@ public void deleteDeleted() throws Exception { .andExpect(status().isNotFound()); } + @Test + public void deleteListOneBitstream() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. 
One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + String bitstreamContent = "ThisIsSomeDummyText"; + //Add a bitstream to an item + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream") + .withDescription("Description") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) + .andExpect(status().is(204)); + + // Verify 404 after delete + getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andExpect(status().isNotFound()); + } + + @Test + public void deleteListOneOfMultipleBitstreams() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete bitstream1 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().is(204)); + + // Verify 404 after delete for bitstream1 + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isNotFound()); + + // check that bitstream2 still exists + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds())); + + // check that bitstream3 still exists + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds())) + ; + } + + @Test + public void deleteListAllBitstreams() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete all bitstreams + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().is(204)); + + // Verify 404 after delete for bitstream1 + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isNotFound()); + + // Verify 404 after delete for bitstream2 + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isNotFound()); + + // Verify 404 after delete for bitstream3 + getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isNotFound()); + } + + @Test + public void deleteListForbidden() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + // Delete using an unauthorized user + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isForbidden()); + + // Verify the bitstreams are still here + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListUnauthorized() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + // Delete as anonymous + getClient().perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isUnauthorized()); + + // Verify the bitstreams are still here + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListEmpty() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete with empty list throws 404 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("")) + .andExpect(status().isNotFound()); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListNotBitstream() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete with list containing non-Bitstream throws 422 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID() + + " \n http://localhost:8080/server/api/core/items/" + publicItem1.getID())) + .andExpect(status().is(422)); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListDifferentItems() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. Two public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + Item publicItem2 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 1 bitstream to each item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. 
+ createBitstream(context, publicItem2, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete with list containing Bitstreams from different items throws 422 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().is(422)); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + } + + @Test + public void deleteListLogo() throws Exception { + // We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + // ** GIVEN ** + // 1. A community with a logo + parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").withLogo("logo_community") + .build(); + + // 2. A collection with a logo + Collection col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection") + .withLogo("logo_collection").build(); + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // trying to DELETE parentCommunity logo and collection logo should work + // we have to delete them separately otherwise it will throw 422 as they belong to different items + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + parentCommunity.getLogo().getID())) + .andExpect(status().is(204)); + + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + col.getLogo().getID())) + .andExpect(status().is(204)); + + // Verify 404 after delete for parentCommunity logo + getClient(token).perform(get("/api/core/bitstreams/" + parentCommunity.getLogo().getID())) + .andExpect(status().isNotFound()); + + // Verify 404 after delete for collection logo + getClient(token).perform(get("/api/core/bitstreams/" + col.getLogo().getID())) + .andExpect(status().isNotFound()); + } + + @Test + public void deleteListMissing() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + + // Delete + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) + .andExpect(status().isNotFound()); + + // Verify 404 after failed delete + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) + .andExpect(status().isNotFound()); + } + + @Test + public void deleteListOneMissing() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. 
+ parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete all bitstreams and a missing bitstream returns 404 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) + .andExpect(status().isNotFound()); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListOneMissingDifferentItems() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. 
Two public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + Item publicItem2 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 1 bitstream to each item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem2, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete all bitstreams and a missing bitstream returns 404 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) + .andExpect(status().isNotFound()); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + } + + @Test + public void deleteListDeleted() throws Exception { + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + String bitstreamContent = "ThisIsSomeDummyText"; + //Add a bitstream to an item + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream") + .withDescription("Description") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) + .andExpect(status().is(204)); + + // Verify 404 when trying to delete a non-existing, already deleted, bitstream + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) + .andExpect(status().is(422)); + } + + @Test + public void deleteListOneDeleted() throws Exception { + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete bitstream1 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().is(204)); + + // Verify 404 when trying to delete a non-existing, already deleted, bitstream + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().is(422)); + } + @Test public void patchBitstreamMetadataAuthorized() throws Exception { runPatchMetadataTests(admin, 200); From 464465560187002f0d50dbd0f6a9f12044a42723 Mon Sep 17 00:00:00 2001 From: jensroets Date: Wed, 14 Sep 2022 15:49:03 +0200 Subject: [PATCH 002/686] 94299 Multiple Bitstream deletion endpoint: rename items to parents --- .../dspace/app/rest/repository/BitstreamRestRepository.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index f599d993be43..3696b3866808 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -161,7 +161,7 @@ protected void deleteList(Context context, List dsoList) throw new UnprocessableEntityException("Not all given items are bitstreams."); } // check that they're all part of the same Item - List items = new ArrayList<>(); + List parents = new ArrayList<>(); for (DSpaceObject dso : dsoList) { Bitstream bit = bs.find(context, dso.getID()); DSpaceObject bitstreamParent = bs.getParentObject(context, bit); @@ -173,10 +173,10 @@ protected void deleteList(Context context, List dsoList) throw new UnprocessableEntityException("The bitstream with uuid " + bit.getID() + " was already deleted"); } else { - items.add(bitstreamParent); + parents.add(bitstreamParent); } } - if (items.stream().distinct().count() > 1) { + if (parents.stream().distinct().count() > 1) { throw new UnprocessableEntityException("Not all given items are part of the same Item."); } // delete all Bitstreams From b05d19ed6caca47fb0f599fb48145223f934b272 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Sep 2022 19:17:24 +0200 Subject: [PATCH 003/686] Always use md5 checksum for data integrity check. 
Send it to S3 to exclude corruption during upload --- .../storage/bitstore/S3BitStoreService.java | 30 ++++++++++++++----- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index 992b940df2ba..f5225154db1d 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -8,8 +8,12 @@ package org.dspace.storage.bitstore; import java.io.File; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.security.DigestInputStream; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; import java.util.Map; import java.util.function.Supplier; import javax.validation.constraints.NotNull; @@ -35,7 +39,7 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.io.FileUtils; +import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; @@ -258,15 +262,24 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { String key = getFullKey(bitstream.getInternalId()); //Copy istream to temp file, and send the file, with some metadata File scratchFile = File.createTempFile(bitstream.getInternalId(), "s3bs"); - try { - FileUtils.copyInputStreamToFile(in, scratchFile); - long contentLength = scratchFile.length(); - + try ( + FileOutputStream fos = new FileOutputStream(scratchFile); + // Read through a digest input stream that will work out the MD5 + DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)); + ) { + Utils.bufferedCopy(dis, fos); + in.close(); + byte[] md5Digest = dis.getMessageDigest().digest(); + String md5Base64 = Base64.encodeBase64String(md5Digest); + ObjectMetadata objMetadata = new ObjectMetadata(); + objMetadata.setContentMD5(md5Base64); PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, scratchFile); PutObjectResult putObjectResult = s3Service.putObject(putObjectRequest); - bitstream.setSizeBytes(contentLength); - bitstream.setChecksum(putObjectResult.getETag()); + bitstream.setSizeBytes(scratchFile.length()); + // we cannot use the S3 ETAG here as it could be not a MD5 in case of multipart upload (large files) or if + // the bucket is encrypted + bitstream.setChecksum(Utils.toHex(md5Digest)); bitstream.setChecksumAlgorithm(CSA); scratchFile.delete(); @@ -274,6 +287,9 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { } catch (AmazonClientException | IOException e) { log.error("put(" + bitstream.getInternalId() + ", is)", e); throw new IOException(e); + } catch (NoSuchAlgorithmException nsae) { + // Should never happen + log.warn("Caught NoSuchAlgorithmException", nsae); } finally { if (scratchFile.exists()) { scratchFile.delete(); From d7d2723d65a8ff2b0ca9f0cf231db0abc7b2ca5b Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 30 Sep 2022 16:08:36 +0200 Subject: [PATCH 004/686] [DURACOM-92] Use TransferManager to download files from S3 --- .../DeleteOnCloseFileInputStream.java | 42 +++++++++++++++++++ .../storage/bitstore/S3BitStoreService.java | 22 ++++++++-- 2 files changed, 60 insertions(+), 4 deletions(-) create mode 100644 
dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java new file mode 100644 index 000000000000..62c24544eeac --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.bitstore; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; + +/** + * When inputstream closes, then delete the file + * http://stackoverflow.com/a/4694155/368581 + */ +public class DeleteOnCloseFileInputStream extends FileInputStream { + + private File file; + + public DeleteOnCloseFileInputStream(String fileName) throws FileNotFoundException { + this(new File(fileName)); + } + + public DeleteOnCloseFileInputStream(File file) throws FileNotFoundException { + super(file); + this.file = file; + } + + public void close() throws IOException { + try { + super.close(); + } finally { + if (file != null) { + file.delete(); + file = null; + } + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index f5225154db1d..6f6711458919 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -15,6 +15,7 @@ import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Map; +import java.util.UUID; import java.util.function.Supplier; import javax.validation.constraints.NotNull; @@ -32,7 +33,9 @@ import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.PutObjectResult; -import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.transfer.Download; +import com.amazonaws.services.s3.transfer.TransferManager; +import com.amazonaws.services.s3.transfer.TransferManagerBuilder; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; @@ -238,9 +241,20 @@ public String generateId() { public InputStream get(Bitstream bitstream) throws IOException { String key = getFullKey(bitstream.getInternalId()); try { - S3Object object = s3Service.getObject(new GetObjectRequest(bucketName, key)); - return (object != null) ? 
object.getObjectContent() : null; - } catch (AmazonClientException e) { + File tempFile = File.createTempFile("s3-disk-copy-" + UUID.randomUUID(), "temp"); + tempFile.deleteOnExit(); + + GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, key); + + TransferManager transferManager = TransferManagerBuilder.standard() + .withS3Client(s3Service) + .build(); + + Download download = transferManager.download(getObjectRequest, tempFile); + download.waitForCompletion(); + + return new DeleteOnCloseFileInputStream(tempFile); + } catch (AmazonClientException | InterruptedException e) { log.error("get(" + key + ")", e); throw new IOException(e); } From 5210aedbf32d9fda7cde79e407d7ba256aad97a1 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Sun, 2 Oct 2022 19:44:29 +0200 Subject: [PATCH 005/686] CST-6950 fix checksum check and history --- .../checker/ChecksumHistoryServiceImpl.java | 3 +- .../storage/bitstore/S3BitStoreService.java | 43 ++++++++++++++++++- dspace/config/modules/storage.cfg | 34 +++++++++++++++ dspace/config/spring/api/bitstore.xml | 1 + 4 files changed, 79 insertions(+), 2 deletions(-) create mode 100644 dspace/config/modules/storage.cfg diff --git a/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java index f8d6560e9246..f7b05d4de9d3 100644 --- a/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java @@ -74,7 +74,8 @@ public void addHistory(Context context, MostRecentChecksum mostRecentChecksum) t if (mostRecentChecksum.getBitstream().isDeleted()) { checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.BITSTREAM_MARKED_DELETED); } else { - checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.CHECKSUM_MATCH); + checksumResult = checksumResultService.findByCode(context, + mostRecentChecksum.getChecksumResult().getResultCode()); } checksumHistory.setResult(checksumResult); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index 6f6711458919..b1e26f8624de 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -97,6 +97,7 @@ public class S3BitStoreService extends BaseBitStoreService { private String awsSecretKey; private String awsRegionName; private boolean useRelativePath; + private boolean trustS3Etag; /** * container for all the assets @@ -330,7 +331,34 @@ public Map about(Bitstream bitstream, Map attrs) throws IOException { try { ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key); if (objectMetadata != null) { - return this.about(objectMetadata, attrs); + if (attrs.containsKey("size_bytes")) { + attrs.put("size_bytes", objectMetadata.getContentLength()); + } + if (attrs.containsKey("checksum")) { + String eTag = objectMetadata.getETag(); + if (trustS3Etag && isMD5Checksum(eTag)) { + attrs.put("checksum", eTag); + } else { + try ( + InputStream in = get(bitstream); + // Read through a digest input stream that will work out the MD5 + DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)); + ) { + in.close(); + byte[] md5Digest = dis.getMessageDigest().digest(); + String md5Base64 = Base64.encodeBase64String(md5Digest); + 
attrs.put("checksum", md5Base64); + } catch (NoSuchAlgorithmException nsae) { + // Should never happen + log.warn("Caught NoSuchAlgorithmException", nsae); + } + } + attrs.put("checksum_algorithm", CSA); + } + if (attrs.containsKey("modified")) { + attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime())); + } + return attrs; } } catch (AmazonS3Exception e) { if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) { @@ -343,6 +371,11 @@ public Map about(Bitstream bitstream, Map attrs) throws IOException { return null; } + private boolean isMD5Checksum(String eTag) { + // if the etag is NOT an MD5 it end with -x where x is the number of part used in the multipart upload + return StringUtils.contains(eTag, "-"); + } + /** * Populates map values by checking key existence *
@@ -495,6 +528,14 @@ public void setUseRelativePath(boolean useRelativePath) { this.useRelativePath = useRelativePath; } + public void setTrustS3Etag(boolean trustS3Etag) { + this.trustS3Etag = trustS3Etag; + } + + public boolean isTrustS3Etag() { + return trustS3Etag; + } + /** * Contains a command-line testing tool. Expects arguments: * -a accessKey -s secretKey -f assetFileName diff --git a/dspace/config/modules/storage.cfg b/dspace/config/modules/storage.cfg new file mode 100644 index 000000000000..c19aab7c089e --- /dev/null +++ b/dspace/config/modules/storage.cfg @@ -0,0 +1,34 @@ +#---------------------------------------------------------------# +#-----------------STORAGE CONFIGURATIONS------------------------# +#---------------------------------------------------------------# +# Configuration properties used by the bitstore.xml config file # +# # +#---------------------------------------------------------------# + +# Use the localStore or the s3Store implementation +assetstore.storename.0 = localStore + +# For using a relative path (xx/xx/xx/xxx...) set to true, default it false +assetstore.s3.useRelativePath = false + +## Assetstore S3 configuration, only used if the above configuration +## is set to s3Store + +# S3 bucket name to store assets in, default would generate a bucket +# based on the dspace host name +assetstore.s3.bucketName = +# Subfolder to organize assets within the bucket, in case this bucket +# is shared. Optional, default is root level of bucket +assetstore.s3.subfolder = + +# please do not use these in production but rely on the aws credentials +# discovery mechanism to configure them (ENV VAR, EC2 Iam role, etc.) +assetstore.s3.awsAccessKey = +assetstore.s3.awsSecretKey = +# to force the use of a specific region when credentials are provided +# in this configuratin file. 
If credentials are left empty this prop +# is ignored +assetstore.s3.awsRegionName = +# trust s3 ETag during the checker process, if it is a md5 checksum +# setting it to false will download the file locally to compute the md5 +assetstore.s3.trustS3Etag = true \ No newline at end of file diff --git a/dspace/config/spring/api/bitstore.xml b/dspace/config/spring/api/bitstore.xml index 15bb3ef1580b..ee5328b5bc68 100644 --- a/dspace/config/spring/api/bitstore.xml +++ b/dspace/config/spring/api/bitstore.xml @@ -23,6 +23,7 @@ + From 70b1ee19f3b5d7e5e0894fcbf0300a6f5f4f3faf Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Sun, 2 Oct 2022 20:47:48 +0200 Subject: [PATCH 006/686] CST-6950 fix checksum check --- .../storage/bitstore/S3BitStoreService.java | 37 ++++++++----------- 1 file changed, 15 insertions(+), 22 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index b1e26f8624de..da170fe0b037 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -334,32 +334,25 @@ public Map about(Bitstream bitstream, Map attrs) throws IOException { if (attrs.containsKey("size_bytes")) { attrs.put("size_bytes", objectMetadata.getContentLength()); } - if (attrs.containsKey("checksum")) { - String eTag = objectMetadata.getETag(); - if (trustS3Etag && isMD5Checksum(eTag)) { - attrs.put("checksum", eTag); - } else { - try ( - InputStream in = get(bitstream); - // Read through a digest input stream that will work out the MD5 - DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)); - ) { - in.close(); - byte[] md5Digest = dis.getMessageDigest().digest(); - String md5Base64 = Base64.encodeBase64String(md5Digest); - attrs.put("checksum", md5Base64); - } catch (NoSuchAlgorithmException nsae) { - // Should never happen - log.warn("Caught NoSuchAlgorithmException", nsae); - } - } - attrs.put("checksum_algorithm", CSA); - } if (attrs.containsKey("modified")) { attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime())); } - return attrs; } + try ( + InputStream in = get(bitstream); + // Read through a digest input stream that will work out the MD5 + DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)); + ) { + in.close(); + byte[] md5Digest = dis.getMessageDigest().digest(); + String md5Base64 = Base64.encodeBase64String(md5Digest); + attrs.put("checksum", md5Base64); + attrs.put("checksum_algorithm", CSA); + } catch (NoSuchAlgorithmException nsae) { + // Should never happen + log.warn("Caught NoSuchAlgorithmException", nsae); + } + return attrs; } catch (AmazonS3Exception e) { if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) { return null; From 75840c70c6df6949f26acc15843e997657008024 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Sun, 2 Oct 2022 21:49:28 +0200 Subject: [PATCH 007/686] CST-6950 fix checker report query --- .../dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java b/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java index 66ce666b9d6d..a31e02cbab4a 100644 --- a/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java @@ -92,8 +92,8 @@ public List findByResultTypeInDateRange(Context context, Dat criteriaQuery.where(criteriaBuilder.and( criteriaBuilder.equal(mostRecentResult.get(ChecksumResult_.resultCode), resultCode), criteriaBuilder.lessThanOrEqualTo( - mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate), - criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate) + mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate), + criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate) ) ); List orderList = new LinkedList<>(); From 66c452bcf93e5366e2ef4da5bd892a079e31864e Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Sun, 2 Oct 2022 22:56:29 +0200 Subject: [PATCH 008/686] CST-6950 fix report email --- .../org/dspace/checker/SimpleReporterServiceImpl.java | 2 ++ dspace-api/src/main/java/org/dspace/core/Email.java | 6 ++++-- .../org/dspace/storage/bitstore/S3BitStoreService.java | 9 --------- dspace/config/modules/storage.cfg | 5 +---- dspace/config/spring/api/bitstore.xml | 1 - 5 files changed, 7 insertions(+), 16 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java b/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java index 26c102e1e78b..ddefb28e1b57 100644 --- a/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java @@ -152,6 +152,7 @@ public int getBitstreamNotFoundReport(Context context, Date startDate, Date endD osw.write("\n"); osw.write(msg("bitstream-not-found-report")); + osw.write(" "); osw.write(applyDateFormatShort(startDate)); osw.write(" "); osw.write(msg("date-range-to")); @@ -230,6 +231,7 @@ public int getUncheckedBitstreamsReport(Context context, OutputStreamWriter osw) osw.write("\n"); osw.write(msg("unchecked-bitstream-report")); + osw.write(" "); osw.write(applyDateFormatShort(new Date())); osw.write("\n\n\n"); diff --git a/dspace-api/src/main/java/org/dspace/core/Email.java b/dspace-api/src/main/java/org/dspace/core/Email.java index 6db27c9e4f18..6b86756e0c08 100644 --- a/dspace-api/src/main/java/org/dspace/core/Email.java +++ b/dspace-api/src/main/java/org/dspace/core/Email.java @@ -314,6 +314,8 @@ public void send() throws MessagingException, IOException { message.addRecipient(Message.RecipientType.TO, new InternetAddress( i.next())); } + // Get headers defined by the template. + String[] templateHeaders = config.getArrayProperty("mail.message.headers"); // Format the mail message body VelocityEngine templateEngine = new VelocityEngine(); @@ -334,6 +336,7 @@ public void send() throws MessagingException, IOException { repo.putStringResource(contentName, content); // Turn content into a template. template = templateEngine.getTemplate(contentName); + templateHeaders = new String[] {}; } StringWriter writer = new StringWriter(); @@ -351,8 +354,7 @@ public void send() throws MessagingException, IOException { message.setSentDate(date); message.setFrom(new InternetAddress(from)); - // Get headers defined by the template. 
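The MostRecentChecksumDAOImpl hunk above swaps the two comparison bounds so the query selects rows whose process start date lies in (startDate, endDate]. A minimal sketch of the corrected predicate shape, with illustrative names only (the real query additionally filters on the checksum result code and uses the JPA metamodel):

    import java.util.Date;
    import javax.persistence.criteria.CriteriaBuilder;
    import javax.persistence.criteria.Expression;
    import javax.persistence.criteria.Predicate;

    public final class DateRangePredicate {

        private DateRangePredicate() {
        }

        // startDate is the exclusive lower bound, endDate the inclusive upper bound,
        // i.e. processStartDate > startDate AND processStartDate <= endDate.
        public static Predicate inRange(CriteriaBuilder cb, Expression<Date> processStartDate,
                                        Date startDate, Date endDate) {
            return cb.and(
                cb.greaterThan(processStartDate, startDate),
                cb.lessThanOrEqualTo(processStartDate, endDate)
            );
        }
    }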
- for (String headerName : config.getArrayProperty("mail.message.headers")) { + for (String headerName : templateHeaders) { String headerValue = (String) vctx.get(headerName); if ("subject".equalsIgnoreCase(headerName)) { if (null != subject) { diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index da170fe0b037..f287c0a59191 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -97,7 +97,6 @@ public class S3BitStoreService extends BaseBitStoreService { private String awsSecretKey; private String awsRegionName; private boolean useRelativePath; - private boolean trustS3Etag; /** * container for all the assets @@ -521,14 +520,6 @@ public void setUseRelativePath(boolean useRelativePath) { this.useRelativePath = useRelativePath; } - public void setTrustS3Etag(boolean trustS3Etag) { - this.trustS3Etag = trustS3Etag; - } - - public boolean isTrustS3Etag() { - return trustS3Etag; - } - /** * Contains a command-line testing tool. Expects arguments: * -a accessKey -s secretKey -f assetFileName diff --git a/dspace/config/modules/storage.cfg b/dspace/config/modules/storage.cfg index c19aab7c089e..3b9171585ee1 100644 --- a/dspace/config/modules/storage.cfg +++ b/dspace/config/modules/storage.cfg @@ -28,7 +28,4 @@ assetstore.s3.awsSecretKey = # to force the use of a specific region when credentials are provided # in this configuratin file. If credentials are left empty this prop # is ignored -assetstore.s3.awsRegionName = -# trust s3 ETag during the checker process, if it is a md5 checksum -# setting it to false will download the file locally to compute the md5 -assetstore.s3.trustS3Etag = true \ No newline at end of file +assetstore.s3.awsRegionName = \ No newline at end of file diff --git a/dspace/config/spring/api/bitstore.xml b/dspace/config/spring/api/bitstore.xml index ee5328b5bc68..15bb3ef1580b 100644 --- a/dspace/config/spring/api/bitstore.xml +++ b/dspace/config/spring/api/bitstore.xml @@ -23,7 +23,6 @@ - From 9bc01e8f05f00c4b5dec4d68390872f3758e0fd3 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Mon, 3 Oct 2022 12:05:12 +0200 Subject: [PATCH 009/686] [DURACOM-92] Fixed S3BitStoreServiceTest tests --- .../storage/bitstore/S3BitStoreService.java | 2 +- .../bitstore/S3BitStoreServiceTest.java | 164 +++++------------- dspace/config/modules/storage.cfg | 31 ---- 3 files changed, 41 insertions(+), 156 deletions(-) delete mode 100644 dspace/config/modules/storage.cfg diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index 5d531e2b12e8..24204c7cb1c7 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -75,7 +75,7 @@ public class S3BitStoreService extends BaseBitStoreService { /** * Checksum algorithm */ - private static final String CSA = "MD5"; + static final String CSA = "MD5"; // These settings control the way an identifier is hashed into // directory and file names diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java index e972aaa02b00..56080d1f97df 100644 --- 
a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java @@ -9,32 +9,35 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.isEmptyOrNullString; -import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.startsWith; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.Charset; import java.util.function.Supplier; import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.model.GetObjectRequest; -import com.amazonaws.services.s3.model.PutObjectRequest; -import com.amazonaws.services.s3.model.PutObjectResult; -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectInputStream; +import com.amazonaws.services.s3.transfer.Download; import com.amazonaws.services.s3.transfer.TransferManager; import com.amazonaws.services.s3.transfer.Upload; import com.amazonaws.services.s3.transfer.model.UploadResult; import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; import org.dspace.AbstractUnitTest; import org.dspace.content.Bitstream; -import org.dspace.curate.Utils; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; @@ -42,6 +45,7 @@ import org.mockito.Mock; import org.mockito.MockedStatic; import org.mockito.Mockito; +import org.mockito.invocation.InvocationOnMock; @@ -158,49 +162,17 @@ public void givenAccessKeysWhenInitThenVerifiesCorrectBuilderCreation() throws I @Test public void givenBucketBitStreamIdInputStreamWhenRetrievingFromS3ThenUsesBucketBitStreamId() throws IOException { String bucketName = "BucketTest"; - String bitStreamId = "BitStreamId"; this.s3BitStoreService.setBucketName(bucketName); this.s3BitStoreService.setUseRelativePath(false); - when(bitstream.getInternalId()).thenReturn(bitStreamId); - - S3Object object = Mockito.mock(S3Object.class); - S3ObjectInputStream inputStream = Mockito.mock(S3ObjectInputStream.class); - when(object.getObjectContent()).thenReturn(inputStream); - when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(object); - this.s3BitStoreService.init(); - assertThat(this.s3BitStoreService.get(bitstream), Matchers.equalTo(inputStream)); - verify(this.s3Service).getObject( - ArgumentMatchers.argThat( - request -> - bucketName.contentEquals(request.getBucketName()) && - bitStreamId.contentEquals(request.getKey()) - ) - ); + Download download = mock(Download.class); - } - - @Test - public void givenBucketBitStreamIdWhenNothingFoundOnS3ThenReturnsNull() throws IOException { - String bucketName = "BucketTest"; - String bitStreamId = "BitStreamId"; - this.s3BitStoreService.setBucketName(bucketName); - this.s3BitStoreService.setUseRelativePath(false); - when(bitstream.getInternalId()).thenReturn(bitStreamId); - - when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(null); - - 
this.s3BitStoreService.init(); - assertThat(this.s3BitStoreService.get(bitstream), Matchers.nullValue()); + when(tm.download(any(GetObjectRequest.class), any(File.class))) + .thenAnswer(invocation -> writeIntoFile(download, invocation, "Test file content")); - verify(this.s3Service).getObject( - ArgumentMatchers.argThat( - request -> - bucketName.contentEquals(request.getBucketName()) && - bitStreamId.contentEquals(request.getKey()) - ) - ); + InputStream inputStream = this.s3BitStoreService.get(bitstream); + assertThat(IOUtils.toString(inputStream, Charset.defaultCharset()), is("Test file content")); } @@ -214,23 +186,14 @@ public void givenSubFolderWhenRequestsItemFromS3ThenTheIdentifierShouldHavePrope this.s3BitStoreService.setSubfolder(subfolder); when(bitstream.getInternalId()).thenReturn(bitStreamId); - S3Object object = Mockito.mock(S3Object.class); - S3ObjectInputStream inputStream = Mockito.mock(S3ObjectInputStream.class); - when(object.getObjectContent()).thenReturn(inputStream); - when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(object); + Download download = mock(Download.class); + + when(tm.download(any(GetObjectRequest.class), any(File.class))) + .thenAnswer(invocation -> writeIntoFile(download, invocation, "Test file content")); this.s3BitStoreService.init(); - assertThat(this.s3BitStoreService.get(bitstream), Matchers.equalTo(inputStream)); - - verify(this.s3Service).getObject( - ArgumentMatchers.argThat( - request -> - bucketName.equals(request.getBucketName()) && - request.getKey().startsWith(subfolder) && - request.getKey().contains(bitStreamId) && - !request.getKey().contains(File.separator + File.separator) - ) - ); + InputStream inputStream = this.s3BitStoreService.get(bitstream); + assertThat(IOUtils.toString(inputStream, Charset.defaultCharset()), is("Test file content")); } @@ -364,86 +327,39 @@ public void givenBitStreamWhenPutThenCallS3PutMethodAndStoresInBitStream() throw this.s3BitStoreService.setUseRelativePath(false); when(bitstream.getInternalId()).thenReturn(bitStreamId); - File file = Mockito.mock(File.class); - InputStream in = Mockito.mock(InputStream.class); - PutObjectResult putObjectResult = Mockito.mock(PutObjectResult.class); + InputStream in = IOUtils.toInputStream("Test file content", Charset.defaultCharset()); + Upload upload = Mockito.mock(Upload.class); UploadResult uploadResult = Mockito.mock(UploadResult.class); when(upload.waitForUploadResult()).thenReturn(uploadResult); - String mockedTag = "1a7771d5fdd7bfdfc84033c70b1ba555"; - when(file.length()).thenReturn(8L); - try (MockedStatic fileMock = Mockito.mockStatic(File.class)) { - try (MockedStatic fileUtilsMock = Mockito.mockStatic(FileUtils.class)) { - try (MockedStatic curateUtils = Mockito.mockStatic(Utils.class)) { - curateUtils.when(() -> Utils.checksum((File) ArgumentMatchers.any(), ArgumentMatchers.any())) - .thenReturn(mockedTag); - - fileMock - .when(() -> File.createTempFile(ArgumentMatchers.any(), ArgumentMatchers.any())) - .thenReturn(file); - - when(this.tm.upload(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any())) - .thenReturn(upload); - - this.s3BitStoreService.init(); - this.s3BitStoreService.put(bitstream, in); - } - } - - } - verify(this.bitstream, Mockito.times(1)).setSizeBytes( - ArgumentMatchers.eq(8L) - ); + when(this.tm.upload(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any())) + .thenReturn(upload); - verify(this.bitstream, Mockito.times(1)).setChecksum( - ArgumentMatchers.eq(mockedTag) 
- ); + this.s3BitStoreService.init(); + this.s3BitStoreService.put(bitstream, in); - verify(this.tm, Mockito.times(1)).upload( - ArgumentMatchers.eq(bucketName), - ArgumentMatchers.eq(bitStreamId), - ArgumentMatchers.eq(file) - ); + verify(this.bitstream).setSizeBytes(17); + verify(this.bitstream, times(2)).getInternalId(); + verify(this.bitstream).setChecksum("ac79653edeb65ab5563585f2d5f14fe9"); + verify(this.bitstream).setChecksumAlgorithm(org.dspace.storage.bitstore.S3BitStoreService.CSA); + verify(this.tm).upload(eq(bucketName), eq(bitStreamId), any(File.class)); - verify(file, Mockito.times(1)).delete(); + verifyNoMoreInteractions(this.bitstream, this.tm); } - @Test - public void givenBitStreamWhenCallingPutFileCopyingThrowsIOExceptionPutThenFileIsRemovedAndStreamClosed() - throws Exception { - String bucketName = "BucketTest"; - String bitStreamId = "BitStreamId"; - this.s3BitStoreService.setBucketName(bucketName); - this.s3BitStoreService.setUseRelativePath(false); - when(bitstream.getInternalId()).thenReturn(bitStreamId); + private Download writeIntoFile(Download download, InvocationOnMock invocation, String content) { - File file = Mockito.mock(File.class); - InputStream in = Mockito.mock(InputStream.class); - try (MockedStatic fileMock = Mockito.mockStatic(File.class)) { - try (MockedStatic fileUtilsMock = Mockito.mockStatic(FileUtils.class)) { - fileUtilsMock - .when(() -> FileUtils.copyInputStreamToFile(ArgumentMatchers.any(), ArgumentMatchers.any())) - .thenThrow(IOException.class); - fileMock - .when(() -> File.createTempFile(ArgumentMatchers.any(), ArgumentMatchers.any())) - .thenReturn(file); - - this.s3BitStoreService.init(); - assertThrows(IOException.class, () -> this.s3BitStoreService.put(bitstream, in)); - } + File file = invocation.getArgument(1, File.class); + try { + FileUtils.write(file, content, Charset.defaultCharset()); + } catch (IOException e) { + throw new RuntimeException(e); } - verify(this.bitstream, Mockito.never()).setSizeBytes(ArgumentMatchers.any(Long.class)); - - verify(this.bitstream, Mockito.never()).setChecksum(ArgumentMatchers.any(String.class)); - - verify(this.s3Service, Mockito.never()).putObject(ArgumentMatchers.any(PutObjectRequest.class)); - - verify(file, Mockito.times(1)).delete(); - + return download; } private int computeSlashes(String internalId) { diff --git a/dspace/config/modules/storage.cfg b/dspace/config/modules/storage.cfg deleted file mode 100644 index 3b9171585ee1..000000000000 --- a/dspace/config/modules/storage.cfg +++ /dev/null @@ -1,31 +0,0 @@ -#---------------------------------------------------------------# -#-----------------STORAGE CONFIGURATIONS------------------------# -#---------------------------------------------------------------# -# Configuration properties used by the bitstore.xml config file # -# # -#---------------------------------------------------------------# - -# Use the localStore or the s3Store implementation -assetstore.storename.0 = localStore - -# For using a relative path (xx/xx/xx/xxx...) set to true, default it false -assetstore.s3.useRelativePath = false - -## Assetstore S3 configuration, only used if the above configuration -## is set to s3Store - -# S3 bucket name to store assets in, default would generate a bucket -# based on the dspace host name -assetstore.s3.bucketName = -# Subfolder to organize assets within the bucket, in case this bucket -# is shared. 
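The reworked unit test above replaces canned S3Object mocks with the writeIntoFile(...) answer, so the stubbed TransferManager actually produces a file for the DeleteOnCloseFileInputStream returned by get() to read. A condensed sketch of that stubbing pattern, assuming a plain Mockito mock (the helper name here is illustrative):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.io.File;
    import java.nio.charset.Charset;

    import com.amazonaws.services.s3.model.GetObjectRequest;
    import com.amazonaws.services.s3.transfer.Download;
    import com.amazonaws.services.s3.transfer.TransferManager;
    import org.apache.commons.io.FileUtils;

    public class TransferManagerStubExample {

        public static TransferManager stubbedTransferManager(String content) {
            TransferManager tm = mock(TransferManager.class);
            Download download = mock(Download.class);
            // Instead of returning a canned value, the answer performs the side effect the real
            // TransferManager would have: it writes the "downloaded" bytes into the target File.
            when(tm.download(any(GetObjectRequest.class), any(File.class)))
                .thenAnswer(invocation -> {
                    File target = invocation.getArgument(1, File.class);
                    FileUtils.write(target, content, Charset.defaultCharset());
                    return download;
                });
            return tm;
        }
    }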
Optional, default is root level of bucket -assetstore.s3.subfolder = - -# please do not use these in production but rely on the aws credentials -# discovery mechanism to configure them (ENV VAR, EC2 Iam role, etc.) -assetstore.s3.awsAccessKey = -assetstore.s3.awsSecretKey = -# to force the use of a specific region when credentials are provided -# in this configuratin file. If credentials are left empty this prop -# is ignored -assetstore.s3.awsRegionName = \ No newline at end of file From ba0819782a3b73305b8da04fec09ee233cf98bdc Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 11 Nov 2022 16:43:05 +0100 Subject: [PATCH 010/686] [DURACOM-92] Improved S3BitStoreService using embedded S3 --- dspace-api/pom.xml | 8 + .../storage/bitstore/S3BitStoreService.java | 6 +- .../S3BitStoreServiceIntegrationTest.java | 390 +++++++++++++++++ .../bitstore/S3BitStoreServiceTest.java | 396 ------------------ 4 files changed, 400 insertions(+), 400 deletions(-) create mode 100644 dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java delete mode 100644 dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index 685005160094..cfc1953d4854 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -832,6 +832,14 @@ + + + io.findify + s3mock_2.13 + 0.2.6 + test + + diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index 434fd191f843..622308b00d9f 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -144,13 +144,11 @@ public S3BitStoreService() {} /** * This constructor is used for test purpose. 
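Patch 010 above adds io.findify:s3mock as a test dependency and introduces an integration test that runs the real AmazonS3 client against a local, file-backed S3 endpoint. Condensed to its essentials (same port and anonymous-credentials setup as the test that follows), the pattern is roughly:

    import com.amazonaws.auth.AWSStaticCredentialsProvider;
    import com.amazonaws.auth.AnonymousAWSCredentials;
    import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
    import com.amazonaws.regions.Regions;
    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3ClientBuilder;
    import io.findify.s3mock.S3Mock;

    public class S3MockQuickStart {
        public static void main(String[] args) {
            // File-backed S3 endpoint on localhost:8001, as in the integration test below
            S3Mock s3Mock = S3Mock.create(8001, System.getProperty("java.io.tmpdir") + "/s3");
            s3Mock.start();

            AmazonS3 client = AmazonS3ClientBuilder.standard()
                    .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
                    .withEndpointConfiguration(
                            new EndpointConfiguration("http://127.0.0.1:8001", Regions.DEFAULT_REGION.getName()))
                    .build();

            client.createBucket("dspace-asset-localhost");
            System.out.println(client.listBuckets());

            s3Mock.shutdown();
        }
    }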
- * In this way is possible to use a mocked instance of AmazonS3 * - * @param s3Service mocked AmazonS3 service + * @param s3Service AmazonS3 service */ - protected S3BitStoreService(AmazonS3 s3Service, TransferManager tm) { + protected S3BitStoreService(AmazonS3 s3Service) { this.s3Service = s3Service; - this.tm = tm; } @Override diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java new file mode 100644 index 000000000000..f362e94dddc7 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java @@ -0,0 +1,390 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.bitstore; + +import static com.amazonaws.regions.Regions.DEFAULT_REGION; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.dspace.storage.bitstore.S3BitStoreService.CSA; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.sql.SQLException; + +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.AnonymousAWSCredentials; +import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.Bucket; +import com.amazonaws.services.s3.model.ObjectMetadata; +import io.findify.s3mock.S3Mock; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.matcher.LambdaMatcher; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.core.Utils; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + + +/** + * @author Luca Giamminonni (luca.giamminonni at 4science.com) + */ +public class S3BitStoreServiceIntegrationTest extends AbstractIntegrationTestWithDatabase { + + private static final String DEFAULT_BUCKET_NAME = "dspace-asset-localhost"; + + private S3BitStoreService s3BitStoreService; + + private AmazonS3 amazonS3Client; + + private S3Mock s3Mock; + + private Collection collection; + + private File s3Directory; + + @Before + public void setup() throws Exception { + + s3Directory = new File(System.getProperty("java.io.tmpdir"), "s3"); 
+ + s3Mock = S3Mock.create(8001, s3Directory.getAbsolutePath()); + s3Mock.start(); + + amazonS3Client = createAmazonS3Client(); + + s3BitStoreService = new S3BitStoreService(amazonS3Client); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + + collection = CollectionBuilder.createCollection(context, parentCommunity) + .build(); + + context.restoreAuthSystemState(); + } + + @After + public void cleanUp() throws IOException { + FileUtils.deleteDirectory(s3Directory); + s3Mock.shutdown(); + } + + @Test + public void testBitstreamPutAndGetWithAlreadyPresentBucket() throws IOException { + + String bucketName = "testbucket"; + + amazonS3Client.createBucket(bucketName); + + s3BitStoreService.setBucketName(bucketName); + s3BitStoreService.init(); + + assertThat(amazonS3Client.listBuckets(), contains(bucketNamed(bucketName))); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + String expectedChecksum = generateChecksum(content); + + assertThat(bitstream.getSizeBytes(), is((long) content.length())); + assertThat(bitstream.getChecksum(), is(expectedChecksum)); + assertThat(bitstream.getChecksumAlgorithm(), is(CSA)); + + InputStream inputStream = s3BitStoreService.get(bitstream); + assertThat(IOUtils.toString(inputStream, UTF_8), is(content)); + + String key = s3BitStoreService.getFullKey(bitstream.getInternalId()); + ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(bucketName, key); + assertThat(objectMetadata.getContentMD5(), is(expectedChecksum)); + + } + + @Test + public void testBitstreamPutAndGetWithoutSpecifingBucket() throws IOException { + + s3BitStoreService.init(); + + assertThat(s3BitStoreService.getBucketName(), is(DEFAULT_BUCKET_NAME)); + + assertThat(amazonS3Client.listBuckets(), contains(bucketNamed(DEFAULT_BUCKET_NAME))); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + String expectedChecksum = generateChecksum(content); + + assertThat(bitstream.getSizeBytes(), is((long) content.length())); + assertThat(bitstream.getChecksum(), is(expectedChecksum)); + assertThat(bitstream.getChecksumAlgorithm(), is(CSA)); + + InputStream inputStream = s3BitStoreService.get(bitstream); + assertThat(IOUtils.toString(inputStream, UTF_8), is(content)); + + String key = s3BitStoreService.getFullKey(bitstream.getInternalId()); + ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(DEFAULT_BUCKET_NAME, key); + assertThat(objectMetadata.getContentMD5(), is(expectedChecksum)); + + } + + @Test + public void testBitstreamPutAndGetWithSubFolder() throws IOException { + + s3BitStoreService.setSubfolder("test/DSpace7/"); + s3BitStoreService.init(); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + InputStream inputStream = s3BitStoreService.get(bitstream); + assertThat(IOUtils.toString(inputStream, UTF_8), is(content)); + + String key = s3BitStoreService.getFullKey(bitstream.getInternalId()); + assertThat(key, startsWith("test/DSpace7/")); + + ObjectMetadata 
objectMetadata = amazonS3Client.getObjectMetadata(DEFAULT_BUCKET_NAME, key); + assertThat(objectMetadata, notNullValue()); + + } + + @Test + public void testBitstreamDeletion() throws IOException { + + s3BitStoreService.init(); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + assertThat(s3BitStoreService.get(bitstream), notNullValue()); + + s3BitStoreService.remove(bitstream); + + IOException exception = assertThrows(IOException.class, () -> s3BitStoreService.get(bitstream)); + assertThat(exception.getCause(), instanceOf(AmazonS3Exception.class)); + assertThat(((AmazonS3Exception) exception.getCause()).getStatusCode(), is(404)); + + } + + @Test + public void handleRegisteredIdentifierPrefixInS3() { + String trueBitStreamId = "012345"; + String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + trueBitStreamId; + // Should be detected as registered bitstream + assertTrue(this.s3BitStoreService.isRegisteredBitstream(registeredBitstreamId)); + } + + @Test + public void stripRegisteredBitstreamPrefixWhenCalculatingPath() { + // Set paths and IDs + String s3Path = "UNIQUE_S3_PATH/test/bitstream.pdf"; + String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + s3Path; + // Paths should be equal, since the getRelativePath method should strip the registered -R prefix + String relativeRegisteredPath = this.s3BitStoreService.getRelativePath(registeredBitstreamId); + assertEquals(s3Path, relativeRegisteredPath); + } + + @Test + public void givenBitStreamIdentifierLongerThanPossibleWhenIntermediatePathIsComputedThenIsSplittedAndTruncated() { + String path = "01234567890123456789"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenBitStreamIdentifierShorterThanAFolderLengthWhenIntermediatePathIsComputedThenIsSingleFolder() { + String path = "0"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "0" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenPartialBitStreamIdentifierWhenIntermediatePathIsComputedThenIsCompletlySplitted() { + String path = "01234"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "01" + File.separator + "23" + File.separator + "4" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenMaxLengthBitStreamIdentifierWhenIntermediatePathIsComputedThenIsSplittedAllAsSubfolder() { + String path = "012345"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenBitStreamIdentifierWhenIntermediatePathIsComputedThenNotEndingDoubleSlash() throws IOException { + StringBuilder path = new StringBuilder("01"); + String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + int slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + 
path.append("2"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + + path.append("3"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + + path.append("4"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + + path.append("56789"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + } + + @Test + public void givenBitStreamIdentidierWhenIntermediatePathIsComputedThenMustBeSplitted() throws IOException { + StringBuilder path = new StringBuilder("01"); + String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + int slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("2"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("3"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("4"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("56789"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + } + + @Test + public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRemoved() { + String sInternalId = new StringBuilder("01") + .append(File.separator) + .append("22") + .append(File.separator) + .append("33") + .append(File.separator) + .append("4455") + .toString(); + String computedPath = this.s3BitStoreService.sanitizeIdentifier(sInternalId); + assertThat(computedPath, Matchers.not(Matchers.startsWith(File.separator))); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator))); + assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator))); + } + + private String generateChecksum(String content) { + try { + MessageDigest m = MessageDigest.getInstance("MD5"); + m.update(content.getBytes()); + return Utils.toHex(m.digest()); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + } + + private AmazonS3 createAmazonS3Client() { + return AmazonS3ClientBuilder.standard() + .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials())) + .withEndpointConfiguration(new EndpointConfiguration("http://127.0.0.1:8001", 
DEFAULT_REGION.getName())) + .build(); + } + + private Item createItem() { + return ItemBuilder.createItem(context, collection) + .withTitle("Test item") + .build(); + } + + private Bitstream createBitstream(String content) { + try { + return BitstreamBuilder + .createBitstream(context, createItem(), toInputStream(content)) + .build(); + } catch (SQLException | AuthorizeException | IOException e) { + throw new RuntimeException(e); + } + } + + private Matcher bucketNamed(String name) { + return LambdaMatcher.matches(bucket -> bucket.getName().equals(name)); + } + + private InputStream toInputStream(String content) { + return IOUtils.toInputStream(content, UTF_8); + } + + private int computeSlashes(String internalId) { + int minimum = internalId.length(); + int slashesPerLevel = minimum / S3BitStoreService.digitsPerLevel; + int odd = Math.min(1, minimum % S3BitStoreService.digitsPerLevel); + int slashes = slashesPerLevel + odd; + return Math.min(slashes, S3BitStoreService.directoryLevels); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java deleted file mode 100644 index 8102e9777954..000000000000 --- a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java +++ /dev/null @@ -1,396 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.storage.bitstore; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.isEmptyOrNullString; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.startsWith; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.Charset; -import java.util.function.Supplier; - -import com.amazonaws.regions.Regions; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3Client; -import com.amazonaws.services.s3.model.GetObjectRequest; -import com.amazonaws.services.s3.transfer.Download; -import com.amazonaws.services.s3.transfer.TransferManager; -import com.amazonaws.services.s3.transfer.Upload; -import com.amazonaws.services.s3.transfer.model.UploadResult; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOUtils; -import org.dspace.AbstractUnitTest; -import org.dspace.content.Bitstream; -import org.hamcrest.Matchers; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentMatchers; -import org.mockito.Mock; -import org.mockito.MockedStatic; -import org.mockito.Mockito; -import org.mockito.invocation.InvocationOnMock; - - - - -/** - * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) - * - */ -public class S3BitStoreServiceTest extends AbstractUnitTest { - - private S3BitStoreService s3BitStoreService; - - @Mock - private AmazonS3Client s3Service; - - @Mock - 
private TransferManager tm; - - @Mock - private Bitstream bitstream; - - @Mock - private Bitstream externalBitstream; - - @Before - public void setUp() throws Exception { - this.s3BitStoreService = new S3BitStoreService(s3Service, tm); - } - - private Supplier mockedServiceSupplier() { - return () -> this.s3Service; - } - - @Test - public void givenBucketWhenInitThenUsesSameBucket() throws IOException { - String bucketName = "Bucket0"; - s3BitStoreService.setBucketName(bucketName); - when(this.s3Service.doesBucketExist(bucketName)).thenReturn(false); - - assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); - - this.s3BitStoreService.init(); - - verify(this.s3Service).doesBucketExist(bucketName); - verify(this.s3Service, Mockito.times(1)).createBucket(bucketName); - assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString()); - assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString()); - assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); - } - - @Test - public void givenEmptyBucketWhenInitThenUsesDefaultBucket() throws IOException { - assertThat(s3BitStoreService.getBucketName(), isEmptyOrNullString()); - when(this.s3Service.doesBucketExist(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX))).thenReturn(false); - assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); - - this.s3BitStoreService.init(); - - verify(this.s3Service, Mockito.times(1)).createBucket(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX)); - assertThat(s3BitStoreService.getBucketName(), Matchers.startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX)); - assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString()); - assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString()); - assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); - } - - @Test - public void givenAccessKeysWhenInitThenVerifiesCorrectBuilderCreation() throws IOException { - assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString()); - assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString()); - assertThat(s3BitStoreService.getBucketName(), isEmptyOrNullString()); - assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); - when(this.s3Service.doesBucketExist(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX))).thenReturn(false); - - final String awsAccessKey = "ACCESS_KEY"; - final String awsSecretKey = "SECRET_KEY"; - - this.s3BitStoreService.setAwsAccessKey(awsAccessKey); - this.s3BitStoreService.setAwsSecretKey(awsSecretKey); - - try (MockedStatic mockedS3BitStore = Mockito.mockStatic(S3BitStoreService.class)) { - mockedS3BitStore - .when(() -> - S3BitStoreService.amazonClientBuilderBy( - ArgumentMatchers.any(Regions.class), - ArgumentMatchers.argThat( - credentials -> - awsAccessKey.equals(credentials.getAWSAccessKeyId()) && - awsSecretKey.equals(credentials.getAWSSecretKey()) - ) - ) - ) - .thenReturn(this.mockedServiceSupplier()); - - this.s3BitStoreService.init(); - - mockedS3BitStore.verify( - () -> - S3BitStoreService.amazonClientBuilderBy( - ArgumentMatchers.any(Regions.class), - ArgumentMatchers.argThat( - credentials -> - awsAccessKey.equals(credentials.getAWSAccessKeyId()) && - awsSecretKey.equals(credentials.getAWSSecretKey()) - ) - ) - ); - } - - - verify(this.s3Service, Mockito.times(1)).createBucket(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX)); - assertThat(s3BitStoreService.getBucketName(), Matchers.startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX)); - 
assertThat(s3BitStoreService.getAwsAccessKey(), Matchers.equalTo(awsAccessKey)); - assertThat(s3BitStoreService.getAwsSecretKey(), Matchers.equalTo(awsSecretKey)); - assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); - } - - @Test - public void givenBucketBitStreamIdInputStreamWhenRetrievingFromS3ThenUsesBucketBitStreamId() throws IOException { - String bucketName = "BucketTest"; - this.s3BitStoreService.setBucketName(bucketName); - this.s3BitStoreService.setUseRelativePath(false); - this.s3BitStoreService.init(); - - Download download = mock(Download.class); - - when(tm.download(any(GetObjectRequest.class), any(File.class))) - .thenAnswer(invocation -> writeIntoFile(download, invocation, "Test file content")); - - InputStream inputStream = this.s3BitStoreService.get(bitstream); - assertThat(IOUtils.toString(inputStream, Charset.defaultCharset()), is("Test file content")); - - } - - @Test - public void givenSubFolderWhenRequestsItemFromS3ThenTheIdentifierShouldHaveProperPath() throws IOException { - String bucketName = "BucketTest"; - String bitStreamId = "012345"; - String subfolder = "/test/DSpace7/"; - this.s3BitStoreService.setBucketName(bucketName); - this.s3BitStoreService.setUseRelativePath(false); - this.s3BitStoreService.setSubfolder(subfolder); - when(bitstream.getInternalId()).thenReturn(bitStreamId); - - Download download = mock(Download.class); - - when(tm.download(any(GetObjectRequest.class), any(File.class))) - .thenAnswer(invocation -> writeIntoFile(download, invocation, "Test file content")); - - this.s3BitStoreService.init(); - InputStream inputStream = this.s3BitStoreService.get(bitstream); - assertThat(IOUtils.toString(inputStream, Charset.defaultCharset()), is("Test file content")); - - } - - @Test - public void handleRegisteredIdentifierPrefixInS3() { - String trueBitStreamId = "012345"; - String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + trueBitStreamId; - // Should be detected as registered bitstream - assertTrue(this.s3BitStoreService.isRegisteredBitstream(registeredBitstreamId)); - } - - @Test - public void stripRegisteredBitstreamPrefixWhenCalculatingPath() { - // Set paths and IDs - String s3Path = "UNIQUE_S3_PATH/test/bitstream.pdf"; - String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + s3Path; - // Paths should be equal, since the getRelativePath method should strip the registered -R prefix - String relativeRegisteredPath = this.s3BitStoreService.getRelativePath(registeredBitstreamId); - assertEquals(s3Path, relativeRegisteredPath); - } - - @Test - public void givenBitStreamIdentifierLongerThanPossibleWhenIntermediatePathIsComputedThenIsSplittedAndTruncated() { - String path = "01234567890123456789"; - String computedPath = this.s3BitStoreService.getIntermediatePath(path); - String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator; - assertThat(computedPath, equalTo(expectedPath)); - } - - @Test - public void givenBitStreamIdentifierShorterThanAFolderLengthWhenIntermediatePathIsComputedThenIsSingleFolder() { - String path = "0"; - String computedPath = this.s3BitStoreService.getIntermediatePath(path); - String expectedPath = "0" + File.separator; - assertThat(computedPath, equalTo(expectedPath)); - } - - @Test - public void givenPartialBitStreamIdentifierWhenIntermediatePathIsComputedThenIsCompletlySplitted() { - String path = "01234"; - String computedPath = this.s3BitStoreService.getIntermediatePath(path); - String expectedPath = "01" + File.separator + "23" + 
File.separator + "4" + File.separator; - assertThat(computedPath, equalTo(expectedPath)); - } - - @Test - public void givenMaxLengthBitStreamIdentifierWhenIntermediatePathIsComputedThenIsSplittedAllAsSubfolder() { - String path = "012345"; - String computedPath = this.s3BitStoreService.getIntermediatePath(path); - String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator; - assertThat(computedPath, equalTo(expectedPath)); - } - - @Test - public void givenBitStreamIdentifierWhenIntermediatePathIsComputedThenNotEndingDoubleSlash() throws IOException { - StringBuilder path = new StringBuilder("01"); - String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - int slashes = computeSlashes(path.toString()); - assertThat(computedPath, Matchers.endsWith(File.separator)); - assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); - - path.append("2"); - computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); - - path.append("3"); - computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); - - path.append("4"); - computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); - - path.append("56789"); - computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); - } - - @Test - public void givenBitStreamIdentidierWhenIntermediatePathIsComputedThenMustBeSplitted() throws IOException { - StringBuilder path = new StringBuilder("01"); - String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - int slashes = computeSlashes(path.toString()); - assertThat(computedPath, Matchers.endsWith(File.separator)); - assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); - - path.append("2"); - computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - slashes = computeSlashes(path.toString()); - assertThat(computedPath, Matchers.endsWith(File.separator)); - assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); - - path.append("3"); - computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - slashes = computeSlashes(path.toString()); - assertThat(computedPath, Matchers.endsWith(File.separator)); - assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); - - path.append("4"); - computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - slashes = computeSlashes(path.toString()); - assertThat(computedPath, Matchers.endsWith(File.separator)); - assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); - - path.append("56789"); - computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); - slashes = computeSlashes(path.toString()); - assertThat(computedPath, Matchers.endsWith(File.separator)); - assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); - } - - @Test - public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRemoved() { - String sInternalId = new StringBuilder("01") - .append(File.separator) - .append("22") - .append(File.separator) - 
.append("33") - .append(File.separator) - .append("4455") - .toString(); - String computedPath = this.s3BitStoreService.sanitizeIdentifier(sInternalId); - assertThat(computedPath, Matchers.not(Matchers.startsWith(File.separator))); - assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator))); - assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator))); - } - - @Test - public void givenBitStreamWhenRemoveThenCallS3DeleteMethod() throws Exception { - String bucketName = "BucketTest"; - String bitStreamId = "BitStreamId"; - this.s3BitStoreService.setBucketName(bucketName); - this.s3BitStoreService.setUseRelativePath(false); - when(bitstream.getInternalId()).thenReturn(bitStreamId); - - this.s3BitStoreService.init(); - this.s3BitStoreService.remove(bitstream); - - verify(this.s3Service, Mockito.times(1)).deleteObject(ArgumentMatchers.eq(bucketName), - ArgumentMatchers.eq(bitStreamId)); - - } - - @Test - public void givenBitStreamWhenPutThenCallS3PutMethodAndStoresInBitStream() throws Exception { - String bucketName = "BucketTest"; - String bitStreamId = "BitStreamId"; - this.s3BitStoreService.setBucketName(bucketName); - this.s3BitStoreService.setUseRelativePath(false); - when(bitstream.getInternalId()).thenReturn(bitStreamId); - - InputStream in = IOUtils.toInputStream("Test file content", Charset.defaultCharset()); - - Upload upload = Mockito.mock(Upload.class); - UploadResult uploadResult = Mockito.mock(UploadResult.class); - when(upload.waitForUploadResult()).thenReturn(uploadResult); - - when(this.tm.upload(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any())) - .thenReturn(upload); - - this.s3BitStoreService.init(); - this.s3BitStoreService.put(bitstream, in); - - verify(this.bitstream).setSizeBytes(17); - verify(this.bitstream, times(2)).getInternalId(); - verify(this.bitstream).setChecksum("ac79653edeb65ab5563585f2d5f14fe9"); - verify(this.bitstream).setChecksumAlgorithm(org.dspace.storage.bitstore.S3BitStoreService.CSA); - verify(this.tm).upload(eq(bucketName), eq(bitStreamId), any(File.class)); - - verifyNoMoreInteractions(this.bitstream, this.tm); - - } - - private Download writeIntoFile(Download download, InvocationOnMock invocation, String content) { - - File file = invocation.getArgument(1, File.class); - - try { - FileUtils.write(file, content, Charset.defaultCharset()); - } catch (IOException e) { - throw new RuntimeException(e); - } - - return download; - } - - private int computeSlashes(String internalId) { - int minimum = internalId.length(); - int slashesPerLevel = minimum / S3BitStoreService.digitsPerLevel; - int odd = Math.min(1, minimum % S3BitStoreService.digitsPerLevel); - int slashes = slashesPerLevel + odd; - return Math.min(slashes, S3BitStoreService.directoryLevels); - } - -} From 4d085503c9c910254d44de249825d3c7a4d7a2a6 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 11 Nov 2022 17:00:17 +0100 Subject: [PATCH 011/686] [DURACOM-92] Fixed dependency convergence error --- dspace-api/pom.xml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index cfc1953d4854..e20fb22f48e7 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -838,6 +838,12 @@ s3mock_2.13 0.2.6 test + + + com.amazonawsl + aws-java-sdk-s3 + + @@ -903,6 +909,11 @@ swagger-core 1.6.2 + + org.scala-lang + scala-library + 2.13.2 + From da778f330665330378e4885175b7c0dd7256585f Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 11 Nov 2022 17:17:12 +0100 Subject: [PATCH 
012/686] [DURACOM-92] Fixed dependency convergence error --- dspace-api/pom.xml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index e20fb22f48e7..9c873847e033 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -843,6 +843,10 @@ com.amazonawsl aws-java-sdk-s3 + + com.amazonaws + aws-java-sdk-s3 + From 8e2ada65b191d55bc86002bef10e2a4707cb4d2a Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 6 Dec 2022 12:36:34 +0100 Subject: [PATCH 013/686] 97248: Fix File info Solr plugin to allow faceting --- .../org/dspace/discovery/SolrServiceFileInfoPlugin.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index 52e0043ff403..c53b48f80f38 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -53,10 +53,14 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So if (bitstreams != null) { for (Bitstream bitstream : bitstreams) { document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName()); String description = bitstream.getDescription(); if ((description != null) && !description.isEmpty()) { document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", bitstream.getName()); } } } @@ -65,4 +69,4 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So } } } -} \ No newline at end of file +} From 3e651af7605853b013fe52607b0701f797090a28 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 6 Dec 2022 12:37:21 +0100 Subject: [PATCH 014/686] 97248: Find DSO based configurations recursively through parent objects --- .../org/dspace/discovery/SearchUtils.java | 45 ++++++++++++----- .../DiscoveryConfigurationService.java | 49 +++++++++++++++++-- .../CollectionIndexFactoryImpl.java | 4 +- .../CommunityIndexFactoryImpl.java | 4 +- .../InprogressSubmissionIndexFactoryImpl.java | 6 +-- .../indexobject/ItemIndexFactoryImpl.java | 2 +- .../repository/DiscoveryRestRepository.java | 10 ++-- 7 files changed, 89 insertions(+), 31 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 90afb09eca99..83cbdeaef6be 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -18,6 +18,7 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.kernel.ServiceManager; @@ -60,28 +61,32 @@ public static SearchService getSearchService() { } public static DiscoveryConfiguration getDiscoveryConfiguration() { - return getDiscoveryConfiguration(null, null); + return getDiscoveryConfiguration(null, null, null); } - public static DiscoveryConfiguration 
getDiscoveryConfiguration(DSpaceObject dso) { - return getDiscoveryConfiguration(null, dso); + public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context, + DSpaceObject dso) { + return getDiscoveryConfiguration(context, null, dso); } /** * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * null prefix mean the normal query, other predefined values are workspace or workflow * + * + * @param context * @param prefix * the namespace of the configuration to lookup if any * @param dso * the DSpaceObject * @return the discovery configuration for the specified scope */ - public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) { + public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context, String prefix, + DSpaceObject dso) { if (prefix != null) { return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); } else { - return getDiscoveryConfigurationByName(dso != null ? dso.getHandle() : null); + return getDiscoveryConfigurationByDSO(context, dso); } } @@ -98,6 +103,11 @@ public static DiscoveryConfiguration getDiscoveryConfigurationByName( return configurationService.getDiscoveryConfiguration(configurationName); } + public static DiscoveryConfiguration getDiscoveryConfigurationByDSO( + Context context, DSpaceObject dso) { + DiscoveryConfigurationService configurationService = getConfigurationService(); + return configurationService.getDiscoveryDSOConfiguration(context, dso); + } public static DiscoveryConfigurationService getConfigurationService() { ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); @@ -114,45 +124,54 @@ public static List getIgnoredMetadataFields(int type) { * A configuration object can be returned for each parent community/collection * * @param item the DSpace item + * @param context * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ - public static List getAllDiscoveryConfigurations(Item item) throws SQLException { + public static List getAllDiscoveryConfigurations(Item item, + final Context context) throws SQLException { List collections = item.getCollections(); - return getAllDiscoveryConfigurations(null, collections, item); + return getAllDiscoveryConfigurations(context, null, collections, item); } /** * Return all the discovery configuration applicable to the provided workspace item + * + * @param context * @param witem a workspace item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkspaceItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workspace", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem()); } /** * Return all the discovery configuration applicable to the provided workflow item + * + * @param context * @param witem a workflow item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkflowItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workflow", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem()); } - private static List getAllDiscoveryConfigurations(String prefix, + private static List getAllDiscoveryConfigurations(final Context context, + String prefix, List collections, Item item) throws SQLException { Set result = new HashSet<>(); for (Collection collection : collections) { - DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection); + DiscoveryConfiguration configuration = getDiscoveryConfiguration(context, prefix, collection); result.add(configuration); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 636e7ccd2ae4..b00ff7356376 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -7,12 +7,20 @@ */ package org.dspace.discovery.configuration; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; +import org.dspace.core.ReloadableEntity; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.services.factory.DSpaceServicesFactory; @@ -22,6 +30,8 @@ */ public class DiscoveryConfigurationService { + private static final Logger log = LogManager.getLogger(); + private Map map; private Map> 
toIgnoreMetadataFields = new HashMap<>(); @@ -41,25 +51,53 @@ public void setToIgnoreMetadataFields(Map> toIgnoreMetadat this.toIgnoreMetadataFields = toIgnoreMetadataFields; } - public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) { + public DiscoveryConfiguration getDiscoveryConfiguration(final Context context, + IndexableObject dso) { String name; if (dso == null) { name = "default"; } else if (dso instanceof IndexableDSpaceObject) { - name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle(); + return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) dso).getIndexedObject()); } else { name = dso.getUniqueIndexID(); } - return getDiscoveryConfiguration(name); } + public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, + DSpaceObject dso) { + String name; + if (dso == null) { + name = "default"; + } else { + name = dso.getHandle(); + } + + DiscoveryConfiguration configuration = getDiscoveryConfiguration(name, false); + if (configuration != null) { + return configuration; + } + DSpaceObjectService dSpaceObjectService = + ContentServiceFactory.getInstance().getDSpaceObjectService(dso); + DSpaceObject parentObject = null; + try { + parentObject = dSpaceObjectService.getParentObject(context, dso); + } catch (SQLException e) { + log.error(e); + } + return getDiscoveryDSOConfiguration(context, parentObject); + } + public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { + return getDiscoveryConfiguration(name, true); + } + + public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) { DiscoveryConfiguration result; result = StringUtils.isBlank(name) ? null : getMap().get(name); - if (result == null) { + if (result == null && useDefault) { //No specific configuration, get the default one result = getMap().get("default"); } @@ -68,11 +106,12 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { } public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, + final Context context, final IndexableObject dso) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); } else { - return getDiscoveryConfiguration(dso); + return getDiscoveryConfiguration(context, dso); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java index c2bacfe5024e..817be7848df7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java @@ -86,7 +86,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCollection inde final Collection collection = indexableCollection.getIndexedObject(); // Retrieve configuration - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(collection); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, collection); DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration .getHitHighlightingConfiguration(); List highlightedMetadataFields = new ArrayList<>(); @@ -173,4 +173,4 @@ public List getCollectionLocations(Context context, Collection collectio return locations; } -} \ No newline at end of file +} diff --git 
a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java index 8521b7dda0de..e92819601839 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java @@ -69,7 +69,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCommunity index final Community community = indexableObject.getIndexedObject(); // Retrieve configuration - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(community); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, community); DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration .getHitHighlightingConfiguration(); List highlightedMetadataFields = new ArrayList<>(); @@ -135,4 +135,4 @@ public List getLocations(Context context, IndexableCommunity indexableDS return locations; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java index d0b0f363e64b..c3629b6362c3 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java @@ -73,11 +73,11 @@ public void storeInprogressItemFields(Context context, SolrInputDocument doc, // Add item metadata List discoveryConfigurations; if (inProgressSubmission instanceof WorkflowItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkflowItem) inProgressSubmission); } else if (inProgressSubmission instanceof WorkspaceItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkspaceItem) inProgressSubmission); } else { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); } indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index e9f18ae949ab..b417237f763e 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -147,7 +147,7 @@ public SolrInputDocument buildDocument(Context context, IndexableItem indexableI } // Add the item metadata - List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); //mandatory facet to show status on mydspace diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java index 52224ef57987..1962d44162e0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java @@ -84,7 +84,7 @@ public SearchConfigurationRest getSearchConfiguration(final String dsoScope, fin IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); } @@ -96,7 +96,7 @@ public SearchResultsRest getSearchObjects(final String query, final List Context context = obtainContext(); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; @@ -121,7 +121,7 @@ public FacetConfigurationRest getFacetsConfiguration(final String dsoScope, fina IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); } @@ -138,7 +138,7 @@ public FacetResultsRest getFacetObjects(String facetName, String prefix, String IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); @@ -157,7 +157,7 @@ public SearchResultsRest getAllFacets(String query, List dsoTypes, Strin Pageable page = PageRequest.of(1, 1); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; From 82bc777e45dce2525e2754fc338d27e7630bad1d Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 13 Dec 2022 12:32:15 +0100 Subject: [PATCH 015/686] Fix issue with indexing and add tests --- .../org/dspace/discovery/SearchUtils.java | 32 +- .../discovery/SolrServiceFileInfoPlugin.java | 6 +- .../DiscoveryConfigurationService.java | 9 +- .../InprogressSubmissionIndexFactoryImpl.java | 6 +- .../org/dspace/builder/CommunityBuilder.java | 24 +- .../config/spring/api/discovery.xml | 3198 +++++++++++++++++ .../DiscoveryScopeBasedRestControllerIT.java | 
595 +++ .../app/rest/matcher/FacetEntryMatcher.java | 11 + .../app/rest/matcher/FacetValueMatcher.java | 10 + machine.cfg | 19 + 10 files changed, 3889 insertions(+), 21 deletions(-) create mode 100644 dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java create mode 100644 machine.cfg diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 83cbdeaef6be..4085e1bbdf37 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -18,6 +18,8 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; @@ -72,7 +74,7 @@ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context con /** * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * null prefix mean the normal query, other predefined values are workspace or workflow - * + * * * @param context * @param prefix @@ -90,9 +92,28 @@ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context con } } + public static Set addDiscoveryConfigurationForParents( + Context context, Set configurations, String prefix, DSpaceObject dso) + throws SQLException { + if (dso == null) { + configurations.add(getDiscoveryConfigurationByName(null)); + return configurations; + } + if (prefix != null) { + configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle())); + } else { + configurations.add(getDiscoveryConfigurationByName(dso.getHandle())); + } + + DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance() + .getDSpaceObjectService(dso); + DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso); + return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject); + } + /** * Return the discovery configuration identified by the specified name - * + * * @param configurationName the configuration name assigned to the bean in the * discovery.xml * @return the discovery configuration @@ -128,8 +149,8 @@ public static List getIgnoredMetadataFields(int type) { * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ - public static List getAllDiscoveryConfigurations(Item item, - final Context context) throws SQLException { + public static List getAllDiscoveryConfigurations(Item item, Context context) + throws SQLException { List collections = item.getCollections(); return getAllDiscoveryConfigurations(context, null, collections, item); } @@ -171,8 +192,7 @@ private static List getAllDiscoveryConfigurations(final Set result = new HashSet<>(); for (Collection collection : collections) { - DiscoveryConfiguration configuration = getDiscoveryConfiguration(context, prefix, collection); - result.add(configuration); + addDiscoveryConfigurationForParents(context, result, prefix, collection); } //Add alwaysIndex configurations diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index c53b48f80f38..6bda2fc52d84 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -59,8 +59,10 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So String description = bitstream.getDescription(); if ((description != null) && !description.isEmpty()) { document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); - document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", bitstream.getName()); - document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", + bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", + bitstream.getName()); } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index b00ff7356376..22443aec22e7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -20,7 +20,6 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; -import org.dspace.core.ReloadableEntity; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.services.factory.DSpaceServicesFactory; @@ -135,9 +134,9 @@ public static void main(String[] args) { System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName( - DiscoveryConfigurationService.class - .getName(), - DiscoveryConfigurationService.class); + DiscoveryConfigurationService.class + .getName(), + DiscoveryConfigurationService.class); for (String key : mainService.getMap().keySet()) { System.out.println(key); @@ -165,7 +164,7 @@ public static void main(String[] args) { System.out.println("Recent submissions configuration:"); DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration - .getRecentSubmissionConfiguration(); + .getRecentSubmissionConfiguration(); System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField()); System.out.println("\tMax recent submissions: " + 
recentSubmissionConfiguration.getMax()); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java index c3629b6362c3..ebedfc34b743 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java @@ -73,9 +73,11 @@ public void storeInprogressItemFields(Context context, SolrInputDocument doc, // Add item metadata List discoveryConfigurations; if (inProgressSubmission instanceof WorkflowItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkflowItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkflowItem) inProgressSubmission); } else if (inProgressSubmission instanceof WorkspaceItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkspaceItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkspaceItem) inProgressSubmission); } else { discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); } diff --git a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java index 5ba36af8f4a3..1f0e8fbd6617 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java @@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { private Community community; + protected CommunityBuilder(Context context) { super(context); } public static CommunityBuilder createCommunity(final Context context) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.create(); + return builder.create(null); + } + public static CommunityBuilder createCommunity(final Context context, String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.create(handle); } - private CommunityBuilder create() { - return createSubCommunity(context, null); + private CommunityBuilder create(String handle) { + return createSubCommunity(context, null, handle); } public static CommunityBuilder createSubCommunity(final Context context, final Community parent) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.createSub(parent); + return builder.createSub(parent, null); } - private CommunityBuilder createSub(final Community parent) { + public static CommunityBuilder createSubCommunity(final Context context, final Community parent, + final String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.createSub(parent, handle); + } + + private CommunityBuilder createSub(final Community parent, String handle) { try { - community = communityService.create(parent, context); + community = communityService.create(parent, context, handle); } catch (Exception e) { e.printStackTrace(); return null; @@ -102,6 +113,7 @@ public CommunityBuilder addParentCommunity(final Context context, final Communit @Override public Community build() { try { + communityService.update(context, community); context.dispatchEvents(); diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml 
b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml new file mode 100644 index 000000000000..6ffcbe661c87 --- /dev/null +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml @@ -0,0 +1,3198 @@
[The 3,198 added lines of this new test discovery.xml did not survive extraction: the XML tags were stripped, leaving only scattered text nodes. What remains shows Spring-configured DiscoveryConfiguration beans with the usual default filter queries (e.g. search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community, entityType_keyword filters) plus scope-specific facets over the new test metadata fields dc.test.parentcommunity1field, dc.test.subcommunity11field, dc.test.collection111field, dc.test.collection121field, dc.test.subcommunity21field, dc.test.collection211field and dc.test.collection221field, which the DiscoveryScopeBasedRestControllerIT added below exercises.]
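As a reading aid for the scope-based behaviour that this test configuration and the integration test below exercise: the lookup added in patch 014 (DiscoveryConfigurationService#getDiscoveryDSOConfiguration) and extended in this patch first tries the configuration registered under the object's own handle, then retries with each parent object's handle, and only falls back to "default" when nothing in the ancestry matches. The self-contained sketch below mirrors that walk with plain maps; ScopeConfigResolver and the configuration names subcommunity11Configuration and parentCommunity1Configuration are illustrative stand-ins, not DSpace classes or bean ids from the (unrecovered) discovery.xml above.

import java.util.HashMap;
import java.util.Map;

// Minimal stand-in for the recursive, handle-keyed configuration lookup:
// own handle first, then parent handles, then the "default" configuration.
public class ScopeConfigResolver {

    // handle -> configuration name, playing the role of the discovery.xml bean map
    private final Map<String, String> configByHandle = new HashMap<>();
    // child handle -> parent handle, playing the role of getParentObject()
    private final Map<String, String> parentByHandle = new HashMap<>();

    public String resolve(String handle) {
        String current = handle;
        while (current != null) {
            if (configByHandle.containsKey(current)) {
                return configByHandle.get(current);   // found a scope-specific configuration
            }
            current = parentByHandle.get(current);    // climb to the parent scope
        }
        return "default";                             // nothing matched anywhere up the tree
    }

    public static void main(String[] args) {
        ScopeConfigResolver resolver = new ScopeConfigResolver();
        // handles taken from the test data below
        resolver.parentByHandle.put("123456789/discovery-collection-1-1-2", "123456789/discovery-sub-community-1-1");
        resolver.parentByHandle.put("123456789/discovery-sub-community-1-1", "123456789/discovery-parent-community-1");
        resolver.configByHandle.put("123456789/discovery-sub-community-1-1", "subcommunity11Configuration");
        resolver.configByHandle.put("123456789/discovery-parent-community-1", "parentCommunity1Configuration");
        // collection 1-1-2 has no configuration of its own, so it inherits its sub-community's
        System.out.println(resolver.resolve("123456789/discovery-collection-1-1-2"));
    }
}

Run as-is this prints subcommunity11Configuration, which is the same inheritance that ScopeBasedIndexingAndSearchTestCollection112 below asserts through the subcommunity11field facet.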
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java new file mode 100644 index 000000000000..a0edf1a0c70a --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -0,0 +1,595 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static
org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.FacetEntryMatcher; +import org.dspace.app.rest.matcher.FacetValueMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.MetadataFieldBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.service.CollectionService; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { + + @Autowired + CollectionService collectionService; + + private Community community1; + private Community subcommunity11; + private Community subcommunity12; + private Collection collection111; + private Collection collection112; + private Collection collection121; + private Collection collection122; + + private Community community2; + private Community subcommunity21; + private Community subcommunity22; + private Collection collection211; + private Collection collection212; + private Collection collection221; + private Collection collection222; + + @Before + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build(); + + community1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") + .build(); + subcommunity11 = CommunityBuilder + .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-1") + .build(); + subcommunity12 = CommunityBuilder + .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-2") + .build(); + collection111 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1") + .build(); + collection112 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2") + .build(); + collection121 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1") + .build(); + + collection122 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2") + .build(); + + community2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") + .build(); + + + subcommunity21 = CommunityBuilder + .createSubCommunity(context, community2, 
"123456789/discovery-sub-community-2-1") + .build(); + subcommunity22 = CommunityBuilder + .createSubCommunity(context, community2, "123456789/discovery-sub-community-2-2") + .build(); + collection211 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1") + .build(); + collection212 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2") + .build(); + collection221 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1") + .build(); + collection222 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2") + .build(); + + + Item item111 = ItemBuilder.createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-item111") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item111") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item111") + .withMetadata("dc", "test", "collection111field", "collection111field-item111") + .withMetadata("dc", "test", "collection121field", "collection121field-item111") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item111") + .withMetadata("dc", "test", "collection211field", "collection211field-item111") + .withMetadata("dc", "test", "collection221field", "collection221field-item111") + .build(); + + Item item112 = ItemBuilder.createItem(context, collection112) + .withMetadata("dc", "contributor", "author", "author-item112") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item112") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item112") + .withMetadata("dc", "test", "collection111field", "collection111field-item112") + .withMetadata("dc", "test", "collection121field", "collection121field-item112") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item112") + .withMetadata("dc", "test", "collection211field", "collection211field-item112") + .withMetadata("dc", "test", "collection221field", "collection221field-item112") + .build(); + + Item item121 = ItemBuilder.createItem(context, collection121) + .withMetadata("dc", "contributor", "author", "author-item121") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item121") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item121") + .withMetadata("dc", "test", "collection111field", "collection111field-item121") + .withMetadata("dc", "test", "collection121field", "collection121field-item121") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item121") + .withMetadata("dc", "test", "collection211field", "collection211field-item121") + .withMetadata("dc", "test", "collection221field", "collection221field-item121") + .build(); + + Item item122 = ItemBuilder.createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-item122") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item122") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item122") + .withMetadata("dc", "test", "collection111field", "collection111field-item122") + .withMetadata("dc", "test", "collection121field", "collection121field-item122") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item122") + .withMetadata("dc", "test", "collection211field", "collection211field-item122") + .withMetadata("dc", 
"test", "collection221field", "collection221field-item122") + .build(); + + Item item211 = ItemBuilder.createItem(context, collection211) + .withMetadata("dc", "contributor", "author", "author-item211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item211") + .withMetadata("dc", "test", "collection111field", "collection111field-item211") + .withMetadata("dc", "test", "collection121field", "collection121field-item211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item211") + .withMetadata("dc", "test", "collection211field", "collection211field-item211") + .withMetadata("dc", "test", "collection221field", "collection221field-item211") + .build(); + + Item item212 = ItemBuilder.createItem(context, collection212) + .withMetadata("dc", "contributor", "author", "author-item212") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item212") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item212") + .withMetadata("dc", "test", "collection111field", "collection111field-item212") + .withMetadata("dc", "test", "collection121field", "collection121field-item212") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item212") + .withMetadata("dc", "test", "collection211field", "collection211field-item212") + .withMetadata("dc", "test", "collection221field", "collection221field-item212") + .build(); + + Item item221 = ItemBuilder.createItem(context, collection221) + .withMetadata("dc", "contributor", "author", "author-item221") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item221") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item221") + .withMetadata("dc", "test", "collection111field", "collection111field-item221") + .withMetadata("dc", "test", "collection121field", "collection121field-item221") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item221") + .withMetadata("dc", "test", "collection211field", "collection211field-item221") + .withMetadata("dc", "test", "collection221field", "collection221field-item221") + .build(); + + Item item222 = ItemBuilder.createItem(context, collection222) + .withMetadata("dc", "contributor", "author", "author-item222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item222") + .withMetadata("dc", "test", "collection111field", "collection111field-item222") + .withMetadata("dc", "test", "collection121field", "collection121field-item222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item222") + .withMetadata("dc", "test", "collection211field", "collection211field-item222") + .withMetadata("dc", "test", "collection221field", "collection221field-item222") + .build(); + + Item mappedItem111222 = ItemBuilder + .createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-mappedItem111222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem111222") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem111222") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem111222") + .withMetadata("dc", "test", 
"subcommunity21field", "subcommunity21field-mappedItem111222") + .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem111222") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem111222") + .build(); + + + Item mappedItem122211 = ItemBuilder + .createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-mappedItem122211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem122211") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem122211") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem122211") + .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem122211") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem122211") + .build(); + + + collectionService.addItem(context, collection222, mappedItem111222); + collectionService.addItem(context, collection211, mappedItem122211); + + + context.dispatchEvents(); + context.restoreAuthSystemState(); + } + + @Test + public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community1.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(community1.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item111", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item112", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem111222", + 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + + + } + + @Test + public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("subcommunity11field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(subcommunity11.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + 
FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item111", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-mappedItem111222", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("collection111field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/collection111field") + .param("scope", String.valueOf(collection111.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection111field", + "collection111field-item111", 1), + FacetValueMatcher.matchEntry("collection111field", + "collection111field-mappedItem111222", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("subcommunity11field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(collection112.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(subcommunity12.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", 
String.valueOf(collection121.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("collection121field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/collection121field") + .param("scope", String.valueOf(collection121.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection121field", + "collection121field-item121", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(collection122.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community2.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("subcommunity21field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(subcommunity21.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item211", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + 
"subcommunity21field-mappedItem122211", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("collection211field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/collection211field") + .param("scope", String.valueOf(collection211.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection211field", + "collection211field-item211", 1), + FacetValueMatcher.matchEntry("collection211field", + "collection211field-mappedItem122211", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("subcommunity21field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(collection212.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("collection221field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/collection221field") + .param("scope", String.valueOf(collection221.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection221field", + 
"collection221field-item221", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java index 5e3c477506b9..34b7b8b30d6a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java @@ -99,6 +99,17 @@ public static Matcher hasContentInOriginalBundleFacet(boolean ha ); } + public static Matcher matchFacet(String name, String facetType, boolean hasNext) { + return allOf( + hasJsonPath("$.name", is(name)), + hasJsonPath("$.facetType", is(facetType)), + hasJsonPath("$.facetLimit", any(Integer.class)), + hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)), + hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name)) + ); + } + + /** * Check that a facet over the dc.type exists and match the default configuration * diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java index a68356da5322..1efafb5406ce 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java @@ -52,6 +52,16 @@ public static Matcher entrySubject(String label, int count) { ); } + public static Matcher matchEntry(String facet, String label, int count) { + return allOf( + hasJsonPath("$.label", is(label)), + hasJsonPath("$.type", is("discover")), + hasJsonPath("$.count", is(count)), + hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")), + hasJsonPath("$._links.search.href", containsString("f." 
+ facet + "=" + label + ",equals")) + ); + } + public static Matcher entrySubject(String label, String authority, int count) { return allOf( diff --git a/machine.cfg b/machine.cfg new file mode 100644 index 000000000000..14f0d1d0b067 --- /dev/null +++ b/machine.cfg @@ -0,0 +1,19 @@ +dspace.shortname = or-platform-7 + +dspace.dir=/Users/yana/dspaces/or-platform-7 + +dspace.server.url =http://localhost:8080/server-or7 +dspace.ui.url = http://localhost:4000 + +# URL for connecting to database +# * Postgres template: jdbc:postgrook naar de toekomst toe wilt dat zeggen dat de backend gewoon in orde is en mogelijk enkel nog eesql://localhost:5432/dspace +# * Oracle template: jdbc:oracle:thin:@//localhost:1521/xe +#db.url = ${db.url} +#db.url = jdbc:postgresql://localhost:5432/or-platform-7 +db.url = jdbc:postgresql://localhost:5434/or-platform-7-4 + + + +solr.server = http://localhost:8983/solr + + From c538b9cbedd2d7ab7ab88b912a5eeb75a180e10d Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 13 Dec 2022 14:27:29 +0100 Subject: [PATCH 016/686] Add docs and remove unused site configuration --- .../org/dspace/discovery/SearchUtils.java | 19 ++- .../DiscoveryConfigurationService.java | 15 ++ .../config/spring/api/discovery.xml | 133 +----------------- .../DiscoveryScopeBasedRestControllerIT.java | 56 ++++++-- dspace/config/spring/api/discovery.xml | 129 ----------------- 5 files changed, 77 insertions(+), 275 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 4085e1bbdf37..418720be4a48 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -77,6 +77,7 @@ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context con * * * @param context + * the database context * @param prefix * the namespace of the configuration to lookup if any * @param dso @@ -92,6 +93,15 @@ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context con } } + /** + * Retrieve the configuration for the current dspace object and all its parents and add it to the provided set + * @param context - The database context + * @param configurations - The set of configurations to add the retrieved configurations to + * @param prefix - The namespace of the configuration to lookup if any + * @param dso - The DSpace Object + * @return the set of configurations with additional retrieved ones for the dspace object and parents + * @throws SQLException + */ public static Set addDiscoveryConfigurationForParents( Context context, Set configurations, String prefix, DSpaceObject dso) throws SQLException { @@ -124,6 +134,13 @@ public static DiscoveryConfiguration getDiscoveryConfigurationByName( return configurationService.getDiscoveryConfiguration(configurationName); } + + /** + * Return the discovery configuration for the provided DSO + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO + */ public static DiscoveryConfiguration getDiscoveryConfigurationByDSO( Context context, DSpaceObject dso) { DiscoveryConfigurationService configurationService = getConfigurationService(); @@ -145,7 +162,7 @@ public static List getIgnoredMetadataFields(int type) { * A configuration object can be returned for each parent community/collection * * @param item the DSpace item - * @param context + * @param context 
the database context * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. */ diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 22443aec22e7..c0eba58669e9 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -63,6 +63,13 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final Context context, return getDiscoveryConfiguration(name); } + /** + * Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO. + */ public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { String name; @@ -91,6 +98,14 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { return getDiscoveryConfiguration(name, true); } + /** + * Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration + * will be returned when no match is found. When useDefault is set to false, null will be returned when no match is + * found. + * @param name - The name of the configuration to retrieve + * @param useDefault - Whether the default configuration should be used when no match is found + * @return the configuration for the provided name + */ public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) { DiscoveryConfiguration result; diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml index 6ffcbe661c87..e029c65aa000 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml @@ -50,9 +50,6 @@ --> - - - @@ -77,6 +74,7 @@ + @@ -543,121 +541,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java index a0edf1a0c70a..15c1019584b1 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -29,12 +29,42 @@ import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +/** + * This class tests the correct inheritance of Discovery configurations for sub communities and collections. 
+ * To thoroughly test this, a community and collection structure is set up to where different communities have custom + * configurations configured for them. + * + * The following structure is uses: + * - Parent Community 1 - Custom configuration: discovery-parent-community-1 + * -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1 + * -- Collection 111 - Custom configuration: discovery-collection-1-1-1 + * -- Collection 112 + * -- Subcommunity 12 + * -- Collection 121 - Custom configuration: discovery-collection-1-2-1 + * -- Collection 122 + * - Parent Community 2 + * -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1 + * -- Collection 211 - Custom configuration: discovery-collection-2-1-1 + * -- Collection 212 + * -- Subcommunity 22 + * -- Collection 221 - Custom configuration: discovery-collection-2-2-1 + * -- Collection 222 + * + * Each custom configuration contains a unique index for a unique metadata field, to verify if correct information is + * indexed and provided for the different search scopes. + * + * Each collection has an item in it. Next to these items, there are two mapped items, one in collection 111 and 222, + * and one in collection 122 and 211. + * + * The tests will verify that for each object, the correct facets are provided and that all the necessary fields to + * power these facets are indexed properly. + */ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { @Autowired CollectionService collectionService; - private Community community1; + private Community parentCommunity1; private Community subcommunity11; private Community subcommunity12; private Collection collection111; @@ -42,7 +72,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg private Collection collection121; private Collection collection122; - private Community community2; + private Community parentCommunity2; private Community subcommunity21; private Community subcommunity22; private Collection collection211; @@ -64,13 +94,13 @@ public void setUp() throws Exception { MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build(); MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build(); - community1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") - .build(); + parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") + .build(); subcommunity11 = CommunityBuilder - .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-1") + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1") .build(); subcommunity12 = CommunityBuilder - .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-2") + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2") .build(); collection111 = CollectionBuilder .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1") @@ -86,15 +116,15 @@ public void setUp() throws Exception { .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2") .build(); - community2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") - .build(); + parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") + .build(); subcommunity21 = CommunityBuilder - .createSubCommunity(context, community2, 
"123456789/discovery-sub-community-2-1") + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-1") .build(); subcommunity22 = CommunityBuilder - .createSubCommunity(context, community2, "123456789/discovery-sub-community-2-2") + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2") .build(); collection211 = CollectionBuilder .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1") @@ -235,7 +265,7 @@ public void setUp() throws Exception { @Test public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { - getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community1.getID()))) + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID()))) .andExpect(status().isOk()) .andExpect(jsonPath("$.type", is("discover"))) @@ -246,7 +276,7 @@ public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { ); getClient().perform(get("/api/discover/facets/parentcommunity1field") - .param("scope", String.valueOf(community1.getID()))) + .param("scope", String.valueOf(parentCommunity1.getID()))) .andExpect(status().isOk()) .andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$._embedded.values", @@ -435,7 +465,7 @@ public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { @Test public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { - getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community2.getID()))) + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID()))) .andExpect(status().isOk()) .andExpect(jsonPath("$.type", is("discover"))) diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 4392e02cb3dc..ae1992fbff35 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -50,9 +50,6 @@ --> - - - @@ -534,120 +531,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - From 463edac869855150b3bb1c6e2f31c8a97482a633 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 13 Dec 2022 17:08:02 +0100 Subject: [PATCH 017/686] Remove local file --- machine.cfg | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 machine.cfg diff --git a/machine.cfg b/machine.cfg deleted file mode 100644 index 14f0d1d0b067..000000000000 --- a/machine.cfg +++ /dev/null @@ -1,19 +0,0 @@ -dspace.shortname = or-platform-7 - -dspace.dir=/Users/yana/dspaces/or-platform-7 - -dspace.server.url =http://localhost:8080/server-or7 -dspace.ui.url = http://localhost:4000 - -# URL for connecting to database -# * Postgres template: jdbc:postgrook naar de toekomst toe wilt dat zeggen dat de backend gewoon in orde is en mogelijk enkel nog eesql://localhost:5432/dspace -# * Oracle template: jdbc:oracle:thin:@//localhost:1521/xe -#db.url = ${db.url} -#db.url = jdbc:postgresql://localhost:5432/or-platform-7 -db.url = jdbc:postgresql://localhost:5434/or-platform-7-4 - - - -solr.server = http://localhost:8983/solr - - From 14534b4eafb8f5333440a624f07395b2cb2f14eb Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 13 Dec 2022 17:47:35 +0100 Subject: 
[PATCH 018/686] Move context to first argument in getDiscoveryConfigurationByNameOrDso --- .../configuration/DiscoveryConfigurationService.java | 4 ++-- .../app/rest/repository/DiscoveryRestRepository.java | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index c0eba58669e9..d7bc3b0f353b 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -119,8 +119,8 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boole return result; } - public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, - final Context context, + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final Context context, + final String configurationName, final IndexableObject dso) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java index 1962d44162e0..e337e76ef253 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java @@ -84,7 +84,7 @@ public SearchConfigurationRest getSearchConfiguration(final String dsoScope, fin IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); } @@ -96,7 +96,7 @@ public SearchResultsRest getSearchObjects(final String query, final List Context context = obtainContext(); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; @@ -121,7 +121,7 @@ public FacetConfigurationRest getFacetsConfiguration(final String dsoScope, fina IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); } @@ -138,7 +138,7 @@ public FacetResultsRest getFacetObjects(String facetName, String prefix, String IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + 
.getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); @@ -157,7 +157,7 @@ public SearchResultsRest getAllFacets(String query, List dsoTypes, Strin Pageable page = PageRequest.of(1, 1); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; From 38b30c394c982c4760a8afc9676bfbe139de5e10 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Wed, 14 Dec 2022 10:32:54 +0100 Subject: [PATCH 019/686] Fix openSearchController issue --- .../src/main/java/org/dspace/app/rest/OpenSearchController.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java index 79ca3817534d..665504139cb3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java @@ -176,7 +176,7 @@ public void search(HttpServletRequest request, if (dsoObject != null) { container = scopeResolver.resolveScope(context, dsoObject); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso("site", container); + .getDiscoveryConfiguration(context, container); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() .toArray( From 69500ad5d579f6891bbf35c35e29b18f120b20e9 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Thu, 15 Dec 2022 11:55:05 +0100 Subject: [PATCH 020/686] Fix discovery test config and make ids for relationship profiles unique --- .../config/spring/api/discovery.xml | 779 ++++++++++++++++-- dspace/config/spring/api/discovery.xml | 14 +- 2 files changed, 724 insertions(+), 69 deletions(-) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml index e029c65aa000..a5d7682d4cfd 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml @@ -48,12 +48,15 @@ the key is used to refer to the page (the "site" or a community/collection handle) the value-ref is a reference to an identifier of the DiscoveryConfiguration format --> - - - - - - + + + + + + + + + @@ -61,17 +64,48 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -176,7 +210,145 @@ + (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection 
OR search.resourcetype:Community + -withdrawn:true AND -discoverable:false @@ -313,7 +485,7 @@ - search.resourcetype:Item + search.resourcetype:Item AND latestVersion:true withdrawn:true OR discoverable:false @@ -455,7 +627,7 @@ - search.resourcetype:Item + search.resourcetype:Item AND latestVersion:true @@ -541,10 +713,11 @@ + + class="org.dspace.discovery.configuration.DiscoveryConfiguration" + scope="prototype"> @@ -579,7 +752,7 @@ - search.resourcetype:Item OR search.resourcetype:WorkspaceItem OR search.resourcetype:XmlWorkflowItem + (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:WorkspaceItem OR search.resourcetype:XmlWorkflowItem @@ -616,8 +789,8 @@ + class="org.dspace.discovery.configuration.DiscoveryConfiguration" + scope="prototype"> @@ -691,8 +864,8 @@ + class="org.dspace.discovery.configuration.DiscoveryConfiguration" + scope="prototype"> @@ -814,7 +987,79 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false @@ -875,7 +1120,8 @@ - search.resourcetype:Item AND entityType_keyword:Person + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Person + -withdrawn:true AND -discoverable:false @@ -893,23 +1139,28 @@ - - + + - + + + - - - - - + + + + + + + + @@ -918,7 +1169,10 @@ - + + + + @@ -928,7 +1182,9 @@ - search.resourcetype:Item AND entityType_keyword:Project + + search.resourcetype:Item AND entityType_keyword:Person + -withdrawn:true AND -discoverable:false @@ -946,27 +1202,200 @@ - - + + - - - + - - - - - - + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Project + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Project + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:OrgUnit + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -990,7 +1419,9 @@ + search.resourcetype:Item AND entityType_keyword:OrgUnit + -withdrawn:true AND -discoverable:false @@ -1049,7 +1480,69 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:JournalIssue + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:JournalIssue + -withdrawn:true AND -discoverable:false @@ -1107,7 +1600,68 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:JournalVolume + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:JournalVolume + -withdrawn:true AND -discoverable:false @@ -1165,7 +1719,68 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Journal + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Journal + -withdrawn:true AND -discoverable:false @@ -1238,7 +1853,8 @@ - search.resourcetype:Item AND (entityType_keyword:OrgUnit OR entityType_keyword:Person) + search.resourcetype:Item AND latestVersion:true AND (entityType_keyword:OrgUnit OR entityType_keyword:Person) + -withdrawn:true AND -discoverable:false @@ -1293,7 +1909,8 @@ - search.resourcetype:Item AND entityType_keyword:OrgUnit AND dc.type:FundingOrganization + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:OrgUnit AND dc.type:FundingOrganization + -withdrawn:true AND -discoverable:false @@ -1302,6 +1919,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item + search.entitytype:${researcher-profile.entity-type:Person} + -withdrawn:true AND -discoverable:false + + + + + + + + + @@ -2288,7 +2943,7 @@ - relation.isAuthorOfPublication + relation.isAuthorOfPublication.latestForDiscovery @@ -2299,7 +2954,7 @@ - relation.isProjectOfPublication + relation.isProjectOfPublication.latestForDiscovery @@ -2311,7 +2966,7 @@ - relation.isOrgUnitOfPublication + relation.isOrgUnitOfPublication.latestForDiscovery @@ -2322,7 +2977,7 @@ - relation.isPublicationOfJournalIssue + relation.isPublicationOfJournalIssue.latestForDiscovery @@ -2333,7 +2988,7 @@ - relation.isJournalOfPublication + relation.isJournalOfPublication.latestForDiscovery @@ -2539,7 +3194,7 @@ - relation.isOrgUnitOfPerson + relation.isOrgUnitOfPerson.latestForDiscovery @@ -2550,7 +3205,7 @@ - relation.isProjectOfPerson + relation.isProjectOfPerson.latestForDiscovery @@ -2562,7 +3217,7 @@ - relation.isPublicationOfAuthor + relation.isPublicationOfAuthor.latestForDiscovery @@ -2634,7 +3289,7 @@ - relation.isPersonOfOrgUnit + relation.isPersonOfOrgUnit.latestForDiscovery @@ -2645,7 +3300,7 @@ - relation.isProjectOfOrgUnit + relation.isProjectOfOrgUnit.latestForDiscovery @@ -2657,7 +3312,7 @@ - relation.isPublicationOfOrgUnit + relation.isPublicationOfOrgUnit.latestForDiscovery @@ -2711,7 +3366,7 @@ - relation.isPublicationOfJournalIssue + relation.isPublicationOfJournalIssue.latestForDiscovery @@ -2734,7 +3389,7 @@ - relation.isIssueOfJournalVolume + relation.isIssueOfJournalVolume.latestForDiscovery @@ -2745,7 +3400,7 @@ - relation.isJournalOfVolume + relation.isJournalOfVolume.latestForDiscovery @@ -2786,7 +3441,7 @@ - relation.isVolumeOfJournal + relation.isVolumeOfJournal.latestForDiscovery @@ -2811,7 +3466,7 @@ - relation.isOrgUnitOfProject + relation.isOrgUnitOfProject.latestForDiscovery @@ -2823,7 +3478,7 @@ - relation.isPersonOfProject + relation.isPersonOfProject.latestForDiscovery @@ -2835,7 +3490,7 @@ - relation.isPublicationOfProject + relation.isPublicationOfProject.latestForDiscovery @@ -2846,7 +3501,7 @@ - relation.isContributorOfPublication + relation.isContributorOfPublication.latestForDiscovery @@ -2857,7 +3512,7 @@ - relation.isPublicationOfContributor + relation.isPublicationOfContributor.latestForDiscovery @@ -2868,7 +3523,7 @@ - relation.isFundingAgencyOfProject + relation.isFundingAgencyOfProject.latestForDiscovery @@ -2879,7 +3534,7 @@ - relation.isProjectOfFundingAgency + relation.isProjectOfFundingAgency.latestForDiscovery diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 5e2cae5e9fef..37d5f2548a4e 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -996,7 +996,7 @@ - + @@ -1129,7 +1129,7 @@ - + @@ -1246,7 
+1246,7 @@ - + @@ -1366,7 +1366,7 @@ - + @@ -1491,7 +1491,7 @@ - + @@ -1611,7 +1611,7 @@ - + @@ -1730,7 +1730,7 @@ - + From 1300cdc75b25181fdeebda20661aaa02b2d92bfc Mon Sep 17 00:00:00 2001 From: Yury Bondarenko Date: Mon, 19 Dec 2022 11:20:53 +0100 Subject: [PATCH 021/686] 97248: Cache discovery configurations by UUID --- .../DiscoveryConfigurationService.java | 47 +++++++++++++------ 1 file changed, 33 insertions(+), 14 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index d7bc3b0f353b..7d5b435555f9 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -12,6 +12,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; @@ -34,6 +35,12 @@ public class DiscoveryConfigurationService { private Map map; private Map> toIgnoreMetadataFields = new HashMap<>(); + /** + * Discovery configurations, cached by DSO UUID. When a DSO doesn't have its own configuration, we take the one of + * the first parent that does. This cache ensures we don't have to go up the hierarchy every time. + */ + private final Map uuidMap = new HashMap<>(); + public Map getMap() { return map; } @@ -72,26 +79,38 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final Context context, */ public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { - String name; + // Fall back to default configuration if (dso == null) { - name = "default"; - } else { - name = dso.getHandle(); + return getDiscoveryConfiguration("default", false); } - DiscoveryConfiguration configuration = getDiscoveryConfiguration(name, false); - if (configuration != null) { - return configuration; + // Attempt to retrieve cached configuration by UUID + if (uuidMap.containsKey(dso.getID())) { + return uuidMap.get(dso.getID()); } - DSpaceObjectService dSpaceObjectService = + + DiscoveryConfiguration configuration; + + // Attempt to retrieve configuration by DSO handle + configuration = getDiscoveryConfiguration(dso.getHandle(), false); + + if (configuration == null) { + // Recurse up the Comm/Coll hierarchy until a configuration is found + DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance().getDSpaceObjectService(dso); - DSpaceObject parentObject = null; - try { - parentObject = dSpaceObjectService.getParentObject(context, dso); - } catch (SQLException e) { - log.error(e); + DSpaceObject parentObject = null; + try { + parentObject = dSpaceObjectService.getParentObject(context, dso); + } catch (SQLException e) { + log.error(e); + } + configuration = getDiscoveryDSOConfiguration(context, parentObject); } - return getDiscoveryDSOConfiguration(context, parentObject); + + // Cache the resulting configuration + uuidMap.put(dso.getID(), configuration); + + return configuration; } public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { From fc9f692bed09f252ab0fcd4d9cd48eb805a7d1f5 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Thu, 12 Jan 2023 17:22:45 +0100 Subject: [PATCH 022/686] [DURACOM-92] Tested and improved S3BitStoreService.about method --- dspace-api/pom.xml | 1 + .../org/dspace/checker/CheckerCommand.java 
| 2 +- .../storage/bitstore/BaseBitStoreService.java | 32 +++--- .../storage/bitstore/BitStoreService.java | 5 +- .../bitstore/BitstreamStorageServiceImpl.java | 31 ++---- .../storage/bitstore/DSBitStoreService.java | 13 +-- .../storage/bitstore/S3BitStoreService.java | 100 ++++++------------ .../service/BitstreamStorageService.java | 2 +- ...tionTest.java => S3BitStoreServiceIT.java} | 55 +++++++++- 9 files changed, 123 insertions(+), 118 deletions(-) rename dspace-api/src/test/java/org/dspace/storage/bitstore/{S3BitStoreServiceIntegrationTest.java => S3BitStoreServiceIT.java} (87%) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index 9c873847e033..814add2085a6 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -917,6 +917,7 @@ org.scala-lang scala-library 2.13.2 + test diff --git a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java index 6b16d51bfe1e..87b0de4a6571 100644 --- a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java +++ b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java @@ -245,7 +245,7 @@ protected void processBitstream(MostRecentChecksum info) throws SQLException { info.setProcessStartDate(new Date()); try { - Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream()); + Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream()); if (MapUtils.isNotEmpty(checksumMap)) { info.setBitstreamFound(true); if (checksumMap.containsKey("checksum")) { diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java index 209c1e21e74d..5b367d7a8136 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java @@ -14,6 +14,8 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; @@ -153,22 +155,24 @@ protected boolean isLonger(String internalId, int endIndex) { * Retrieves a map of useful metadata about the File (size, checksum, modified) * * @param file The File to analyze - * @param attrs The map where we are storing values + * @param attrs The list of requested metadata values * @return Map of updated metadatas / attrs * @throws IOException */ - public Map about(File file, Map attrs) throws IOException { + public Map about(File file, List attrs) throws IOException { + + Map metadata = new HashMap(); + try { if (file != null && file.exists()) { - this.putValueIfExistsKey(attrs, SIZE_BYTES, file.length()); - if (attrs.containsKey(CHECKSUM)) { - attrs.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file))); - attrs.put(CHECKSUM_ALGORITHM, CSA); + this.putValueIfExistsKey(attrs, metadata, SIZE_BYTES, file.length()); + if (attrs.contains(CHECKSUM)) { + metadata.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file))); + metadata.put(CHECKSUM_ALGORITHM, CSA); } - this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(file.lastModified())); - return attrs; + this.putValueIfExistsKey(attrs, metadata, MODIFIED, String.valueOf(file.lastModified())); } - return null; + return metadata; } catch (Exception e) { log.error("about( FilePath: " + file.getAbsolutePath() + ", Map: " + attrs.toString() + ")", e); throw 
new IOException(e); @@ -204,13 +208,9 @@ private byte[] generateChecksumFrom(FileInputStream fis) throws IOException, NoS } } - protected void putValueIfExistsKey(Map attrs, String key, Object value) { - this.putEntryIfExistsKey(attrs, key, Map.entry(key, value)); - } - - protected void putEntryIfExistsKey(Map attrs, String key, Map.Entry entry) { - if (attrs.containsKey(key)) { - attrs.put(entry.getKey(), entry.getValue()); + protected void putValueIfExistsKey(List attrs, Map metadata, String key, Object value) { + if (attrs.contains(key)) { + metadata.put(key, value); } } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java index b6ac540c5047..5a02ad1d5617 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java @@ -9,6 +9,7 @@ import java.io.IOException; import java.io.InputStream; +import java.util.List; import java.util.Map; import org.dspace.content.Bitstream; @@ -62,13 +63,13 @@ public interface BitStoreService { * Obtain technical metadata about an asset in the asset store. * * @param bitstream The bitstream to describe - * @param attrs A Map whose keys consist of desired metadata fields + * @param attrs A List of desired metadata fields * @return attrs * A Map with key/value pairs of desired metadata * If file not found, then return null * @throws java.io.IOException If a problem occurs while obtaining metadata */ - public Map about(Bitstream bitstream, Map attrs) throws IOException; + public Map about(Bitstream bitstream, List attrs) throws IOException; /** * Remove an asset from the asset store. diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java index 0bd71088da44..3124fd89ca21 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java @@ -165,12 +165,9 @@ public UUID register(Context context, Bitstream bitstream, int assetstore, bitstream.setStoreNumber(assetstore); bitstreamService.update(context, bitstream); - Map wantedMetadata = new HashMap(); - wantedMetadata.put("size_bytes", null); - wantedMetadata.put("checksum", null); - wantedMetadata.put("checksum_algorithm", null); + List wantedMetadata = List.of("size_bytes", "checksum", "checksum_algorithm"); + Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata); - Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata); if (MapUtils.isEmpty(receivedMetadata)) { String message = "Not able to register bitstream:" + bitstream.getID() + " at path: " + bitstreamPath; log.error(message); @@ -200,13 +197,8 @@ public UUID register(Context context, Bitstream bitstream, int assetstore, } @Override - public Map computeChecksum(Context context, Bitstream bitstream) throws IOException { - Map wantedMetadata = new HashMap(); - wantedMetadata.put("checksum", null); - wantedMetadata.put("checksum_algorithm", null); - - Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); - return receivedMetadata; + public Map computeChecksum(Context context, Bitstream bitstream) throws IOException { + return this.getStore(bitstream.getStoreNumber()).about(bitstream, 
List.of("checksum", "checksum_algorithm")); } @Override @@ -232,10 +224,9 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio List storage = bitstreamService.findDeletedBitstreams(context); for (Bitstream bitstream : storage) { UUID bid = bitstream.getID(); - Map wantedMetadata = new HashMap(); - wantedMetadata.put("size_bytes", null); - wantedMetadata.put("modified", null); - Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); + List wantedMetadata = List.of("size_bytes", "modified"); + Map receivedMetadata = this.getStore(bitstream.getStoreNumber()) + .about(bitstream, wantedMetadata); // Make sure entries which do not exist are removed @@ -328,13 +319,11 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio @Nullable @Override public Long getLastModified(Bitstream bitstream) throws IOException { - Map attrs = new HashMap(); - attrs.put("modified", null); - attrs = this.getStore(bitstream.getStoreNumber()).about(bitstream, attrs); - if (attrs == null || !attrs.containsKey("modified")) { + Map metadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("modified")); + if (metadata == null || !metadata.containsKey("modified")) { return null; } - return Long.valueOf(attrs.get("modified").toString()); + return Long.valueOf(metadata.get("modified").toString()); } /** diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java index 1fdf1e84e115..6fef7365e482 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java @@ -15,6 +15,7 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.List; import java.util.Map; import org.apache.logging.log4j.Logger; @@ -126,13 +127,13 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { /** * Obtain technical metadata about an asset in the asset store. 
* - * @param bitstream The asset to describe - * @param attrs A Map whose keys consist of desired metadata fields - * @return attrs - * A Map with key/value pairs of desired metadata - * @throws java.io.IOException If a problem occurs while obtaining metadata + * @param bitstream The asset to describe + * @param attrs A List of desired metadata fields + * @return attrs A Map with key/value pairs of desired metadata + * @throws java.io.IOException If a problem occurs while obtaining + * metadata */ - public Map about(Bitstream bitstream, Map attrs) throws IOException { + public Map about(Bitstream bitstream, List attrs) throws IOException { try { // potentially expensive, since it may calculate the checksum File file = getFile(bitstream); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index 622308b00d9f..d2c9839905cd 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -7,6 +7,8 @@ */ package org.dspace.storage.bitstore; +import static java.lang.String.valueOf; + import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -14,6 +16,8 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.UUID; import java.util.function.Supplier; @@ -26,7 +30,6 @@ import com.amazonaws.regions.Region; import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.GetObjectRequest; @@ -42,6 +45,7 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.codec.binary.Base64; +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; @@ -208,7 +212,7 @@ public void init() throws IOException { } try { - if (!s3Service.doesBucketExist(bucketName)) { + if (!s3Service.doesBucketExistV2(bucketName)) { s3Service.createBucket(bucketName); log.info("Creating new S3 Bucket: " + bucketName); } @@ -294,10 +298,6 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { ) { Utils.bufferedCopy(dis, fos); in.close(); - byte[] md5Digest = dis.getMessageDigest().digest(); - String md5Base64 = Base64.encodeBase64String(md5Digest); - ObjectMetadata objMetadata = new ObjectMetadata(); - objMetadata.setContentMD5(md5Base64); Upload upload = tm.upload(bucketName, key, scratchFile); @@ -306,7 +306,7 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { bitstream.setSizeBytes(scratchFile.length()); // we cannot use the S3 ETAG here as it could be not a MD5 in case of multipart upload (large files) or if // the bucket is encrypted - bitstream.setChecksum(Utils.toHex(md5Digest)); + bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest())); bitstream.setChecksumAlgorithm(CSA); } catch (AmazonClientException | IOException | InterruptedException e) { @@ -329,86 +329,53 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { * (Does not use getContentMD5, as that is 128-bit 
MD5 digest calculated on caller's side) * * @param bitstream The asset to describe - * @param attrs A Map whose keys consist of desired metadata fields + * @param attrs A List of desired metadata fields * @return attrs * A Map with key/value pairs of desired metadata * If file not found, then return null * @throws java.io.IOException If a problem occurs while obtaining metadata */ @Override - public Map about(Bitstream bitstream, Map attrs) throws IOException { + public Map about(Bitstream bitstream, List attrs) throws IOException { + String key = getFullKey(bitstream.getInternalId()); // If this is a registered bitstream, strip the -R prefix before retrieving if (isRegisteredBitstream(key)) { key = key.substring(REGISTERED_FLAG.length()); } + + Map metadata = new HashMap<>(); + try { + ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key); if (objectMetadata != null) { - if (attrs.containsKey("size_bytes")) { - attrs.put("size_bytes", objectMetadata.getContentLength()); - } - if (attrs.containsKey("modified")) { - attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime())); - } + putValueIfExistsKey(attrs, metadata, "size_bytes", objectMetadata.getContentLength()); + putValueIfExistsKey(attrs, metadata, "modified", valueOf(objectMetadata.getLastModified().getTime())); } - try ( - InputStream in = get(bitstream); - // Read through a digest input stream that will work out the MD5 - DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)); - ) { - in.close(); - byte[] md5Digest = dis.getMessageDigest().digest(); - String md5Base64 = Base64.encodeBase64String(md5Digest); - attrs.put("checksum", md5Base64); - attrs.put("checksum_algorithm", CSA); - } catch (NoSuchAlgorithmException nsae) { - // Should never happen - log.warn("Caught NoSuchAlgorithmException", nsae); + + putValueIfExistsKey(attrs, metadata, "checksum_algorithm", CSA); + + if (attrs.contains("checksum")) { + try (InputStream in = get(bitstream)) { + byte[] md5Digest = MessageDigest.getInstance(CSA).digest(IOUtils.toByteArray(in)); + metadata.put("checksum", Base64.encodeBase64String(md5Digest)); + } catch (NoSuchAlgorithmException nsae) { + // Should never happen + log.warn("Caught NoSuchAlgorithmException", nsae); + } } - return attrs; + + return metadata; } catch (AmazonS3Exception e) { if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) { - return null; + return metadata; } } catch (AmazonClientException e) { log.error("about(" + key + ", attrs)", e); throw new IOException(e); } - return null; - } - - private boolean isMD5Checksum(String eTag) { - // if the etag is NOT an MD5 it end with -x where x is the number of part used in the multipart upload - return StringUtils.contains(eTag, "-"); - } - - /** - * Populates map values by checking key existence - *
          -     * Adds technical metadata about an asset in the asset store, like:
          -     * <ul>
          -     *   <li>size_bytes</li>
          -     *   <li>checksum</li>
          -     *   <li>checksum_algorithm</li>
          -     *   <li>modified</li>
          -     * </ul>
          
- * - * @param objectMetadata containing technical data - * @param attrs map with keys populated - * @return Map of enriched attrs with values - */ - public Map about(ObjectMetadata objectMetadata, Map attrs) { - if (objectMetadata != null) { - this.putValueIfExistsKey(attrs, SIZE_BYTES, objectMetadata.getContentLength()); - - // put CHECKSUM_ALGORITHM if exists CHECKSUM - this.putValueIfExistsKey(attrs, CHECKSUM, objectMetadata.getETag()); - this.putEntryIfExistsKey(attrs, CHECKSUM, Map.entry(CHECKSUM_ALGORITHM, CSA)); - - this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(objectMetadata.getLastModified().getTime())); - } - return attrs; + return metadata; } /** @@ -572,13 +539,14 @@ public static void main(String[] args) throws Exception { String accessKey = command.getOptionValue("a"); String secretKey = command.getOptionValue("s"); - String assetFile = command.getOptionValue("f"); S3BitStoreService store = new S3BitStoreService(); AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey); - store.s3Service = new AmazonS3Client(awsCredentials); + store.s3Service = AmazonS3ClientBuilder.standard() + .withCredentials(new AWSStaticCredentialsProvider(awsCredentials)) + .build(); //Todo configurable region Region usEast1 = Region.getRegion(Regions.US_EAST_1); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java index 209ef5d16be6..7f5ed8f9129f 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java @@ -102,7 +102,7 @@ public interface BitstreamStorageService { public UUID register(Context context, Bitstream bitstream, int assetstore, String bitstreamPath) throws SQLException, IOException, AuthorizeException; - public Map computeChecksum(Context context, Bitstream bitstream) throws IOException; + public Map computeChecksum(Context context, Bitstream bitstream) throws IOException; /** * Does the internal_id column in the bitstream row indicate the bitstream diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java similarity index 87% rename from dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java rename to dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java index f362e94dddc7..7e14f82be289 100644 --- a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java +++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java @@ -13,6 +13,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -27,6 +28,8 @@ import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.sql.SQLException; +import java.util.List; +import java.util.Map; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.AnonymousAWSCredentials; @@ -37,6 +40,7 @@ import com.amazonaws.services.s3.model.Bucket; import 
com.amazonaws.services.s3.model.ObjectMetadata; import io.findify.s3mock.S3Mock; +import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.dspace.AbstractIntegrationTestWithDatabase; @@ -60,7 +64,7 @@ /** * @author Luca Giamminonni (luca.giamminonni at 4science.com) */ -public class S3BitStoreServiceIntegrationTest extends AbstractIntegrationTestWithDatabase { +public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase { private static final String DEFAULT_BUCKET_NAME = "dspace-asset-localhost"; @@ -122,7 +126,7 @@ public void testBitstreamPutAndGetWithAlreadyPresentBucket() throws IOException s3BitStoreService.put(bitstream, toInputStream(content)); - String expectedChecksum = generateChecksum(content); + String expectedChecksum = Utils.toHex(generateChecksum(content)); assertThat(bitstream.getSizeBytes(), is((long) content.length())); assertThat(bitstream.getChecksum(), is(expectedChecksum)); @@ -153,7 +157,7 @@ public void testBitstreamPutAndGetWithoutSpecifingBucket() throws IOException { s3BitStoreService.put(bitstream, toInputStream(content)); - String expectedChecksum = generateChecksum(content); + String expectedChecksum = Utils.toHex(generateChecksum(content)); assertThat(bitstream.getSizeBytes(), is((long) content.length())); assertThat(bitstream.getChecksum(), is(expectedChecksum)); @@ -214,6 +218,47 @@ public void testBitstreamDeletion() throws IOException { } + @Test + public void testAbout() throws IOException { + + s3BitStoreService.init(); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + Map about = s3BitStoreService.about(bitstream, List.of()); + assertThat(about.size(), is(0)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about.size(), is(1)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about, hasEntry(is("modified"), notNullValue())); + assertThat(about.size(), is(2)); + + String expectedChecksum = Base64.encodeBase64String(generateChecksum(content)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about, hasEntry(is("modified"), notNullValue())); + assertThat(about, hasEntry("checksum", expectedChecksum)); + assertThat(about.size(), is(3)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum", "checksum_algorithm")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about, hasEntry(is("modified"), notNullValue())); + assertThat(about, hasEntry("checksum", expectedChecksum)); + assertThat(about, hasEntry("checksum_algorithm", CSA)); + assertThat(about.size(), is(4)); + + } + @Test public void handleRegisteredIdentifierPrefixInS3() { String trueBitStreamId = "012345"; @@ -338,11 +383,11 @@ public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRem assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator))); } - private String generateChecksum(String content) { + private byte[] generateChecksum(String content) { try { MessageDigest m = MessageDigest.getInstance("MD5"); m.update(content.getBytes()); - 
return Utils.toHex(m.digest()); + return m.digest(); } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } From b9f5bf9f8972a74bcb8adfa3656109a37d3b2722 Mon Sep 17 00:00:00 2001 From: eskander Date: Mon, 6 Feb 2023 14:38:46 +0200 Subject: [PATCH 023/686] [DSC-879] Identifiers Signature should check all item types --- dspace/config/spring/api/deduplication.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/dspace/config/spring/api/deduplication.xml b/dspace/config/spring/api/deduplication.xml index bdf2747f47b3..7dd4f1e7bd20 100644 --- a/dspace/config/spring/api/deduplication.xml +++ b/dspace/config/spring/api/deduplication.xml @@ -33,6 +33,7 @@ + @@ -48,6 +49,7 @@ +
@@ -55,6 +57,7 @@ + @@ -62,6 +65,7 @@ + @@ -78,6 +82,7 @@ + From 69d124603b256e2ddb0de93892a88f41c17d4e9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Fri, 10 Feb 2023 15:35:54 +0000 Subject: [PATCH 024/686] Downgrade stylesheet version Due to an warning message reported in: #8661 --- .../src/main/resources/org/dspace/license/LicenseCleanup.xsl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl index 84c62158fe75..d9a9745a1b10 100644 --- a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl +++ b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl @@ -8,7 +8,7 @@ http://www.dspace.org/license/ --> - - \ No newline at end of file + From e4fcfdada6cc1f8e0950785d0feaab54b3869afe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Fri, 10 Feb 2023 15:43:44 +0000 Subject: [PATCH 025/686] Downgrade stylesheet version Due to an warning message reported in: #8661 --- .../src/main/resources/org/dspace/license/CreativeCommons.xsl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl index f32942a302a2..d9f6cd361434 100644 --- a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl +++ b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl @@ -8,7 +8,7 @@ http://www.dspace.org/license/ --> - @@ -47,4 +47,4 @@ - \ No newline at end of file + From 191d3700bb083129c7bd925be2a16df852f9049e Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Thu, 9 Feb 2023 14:45:42 +0100 Subject: [PATCH 026/686] [CST-6402] manage saf import via remote url --- .../org/dspace/app/itemimport/ItemImport.java | 27 ++++++++++++++++--- .../ItemImportScriptConfiguration.java | 6 ++++- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index 6870b94eee1d..2b1089e3e01f 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -11,6 +11,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.net.URL; import java.nio.file.Files; import java.sql.SQLException; import java.util.ArrayList; @@ -74,6 +75,7 @@ public class ItemImport extends DSpaceRunnable { protected boolean isQuiet = false; protected boolean commandLineCollections = false; protected boolean zip = false; + protected boolean remoteUrl = false; protected String zipfilename = null; protected boolean help = false; protected File workDir = null; @@ -253,6 +255,17 @@ public void internalRun() throws Exception { * @param context */ protected void validate(Context context) { + // check zip type: uploaded file or remote url + if (commandLine.hasOption('z')) { + zipfilename = commandLine.getOptionValue('z'); + } else if (commandLine.hasOption('u')) { + remoteUrl = true; + zipfilename = commandLine.getOptionValue('u'); + } + if (StringUtils.isBlank(zipfilename)) { + throw new UnsupportedOperationException("Must run with either name of zip file or url of zip file"); + } + if (command == null) { handler.logError("Must run with either add, replace, or remove (run with -h flag 
for details)"); throw new UnsupportedOperationException("Must run with either add, replace, or remove"); @@ -306,17 +319,24 @@ protected void process(Context context, ItemImportService itemImportService, * @throws Exception */ protected void readZip(Context context, ItemImportService itemImportService) throws Exception { - Optional optionalFileStream = handler.getFileStream(context, zipfilename); + Optional optionalFileStream = Optional.empty(); + if (!remoteUrl) { + // manage zip via upload + optionalFileStream = handler.getFileStream(context, zipfilename); + } else { + // manage zip via remote url + optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + } if (optionalFileStream.isPresent()) { workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); - sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } else { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } /** @@ -356,7 +376,6 @@ protected void setMapFile() throws IOException { */ protected void setZip() { zip = true; - zipfilename = commandLine.getOptionValue('z'); } /** diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java index a3149040c49b..cfe97ad89bd6 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -64,7 +64,11 @@ public Options getOptions() { options.addOption(Option.builder("z").longOpt("zip") .desc("name of zip file") .type(InputStream.class) - .hasArg().required().build()); + .hasArg().build()); + options.addOption(Option.builder("u").longOpt("url") + .desc("url of zip file") + .type(InputStream.class) + .hasArg().build()); options.addOption(Option.builder("c").longOpt("collection") .desc("destination collection(s) Handle or database ID") .hasArg().required(false).build()); From 3aa3a945afc6070e89a250328c2d68e56bb267b1 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 17 Feb 2023 10:12:27 -0600 Subject: [PATCH 027/686] [maven-release-plugin] prepare for next development iteration --- dspace-api/pom.xml | 2 +- dspace-iiif/pom.xml | 2 +- dspace-oai/pom.xml | 2 +- dspace-rdf/pom.xml | 2 +- dspace-rest/pom.xml | 4 ++-- dspace-server-webapp/pom.xml | 2 +- dspace-services/pom.xml | 2 +- dspace-sword/pom.xml | 2 +- dspace-swordv2/pom.xml | 2 +- dspace/modules/additions/pom.xml | 2 +- dspace/modules/pom.xml | 2 +- dspace/modules/rest/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- dspace/pom.xml | 2 +- pom.xml | 32 ++++++++++++++++---------------- 15 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index b02778f7bbcf..d33387859fb3 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT .. 
diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index b79e3954f931..7e26e22fa2ea 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 0bd38a139043..27efba73d069 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index 0a4aa46cbd35..95354621aa13 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index df97a13ffc99..7fdf21ef4ce6 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - 7.5 + 7.6-SNAPSHOT DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index 251f36c026af..99aa88bebf0c 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 362027d3da15..f3112b049bdd 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index 77c3fdb4c4a0..f5ef7e01d82a 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 4b20c40898a9..35206d6ee291 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index e71cb6e585a3..8f5a6f84f492 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index 45e78a39d787..b60246ba6cc9 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT ../../pom.xml diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index b1a51b33be96..a7c9b5922c62 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index ce301196793e..9b696fa0cbda 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -13,7 +13,7 @@ just adding new jar in the classloader modules org.dspace - 7.5 + 7.6-SNAPSHOT .. diff --git a/dspace/pom.xml b/dspace/pom.xml index 3198433c8d71..7916648e4784 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -16,7 +16,7 @@ org.dspace dspace-parent - 7.5 + 7.6-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index 5fc99b9081be..00463e300791 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - 7.5 + 7.6-SNAPSHOT DSpace Parent Project DSpace open source software is a turnkey institutional repository application. 
@@ -872,14 +872,14 @@ org.dspace dspace-rest - 7.5 + 7.6-SNAPSHOT jar classes org.dspace dspace-rest - 7.5 + 7.6-SNAPSHOT war @@ -1030,69 +1030,69 @@ org.dspace dspace-api - 7.5 + 7.6-SNAPSHOT org.dspace dspace-api test-jar - 7.5 + 7.6-SNAPSHOT test org.dspace.modules additions - 7.5 + 7.6-SNAPSHOT org.dspace dspace-sword - 7.5 + 7.6-SNAPSHOT org.dspace dspace-swordv2 - 7.5 + 7.6-SNAPSHOT org.dspace dspace-oai - 7.5 + 7.6-SNAPSHOT org.dspace dspace-services - 7.5 + 7.6-SNAPSHOT org.dspace dspace-server-webapp test-jar - 7.5 + 7.6-SNAPSHOT test org.dspace dspace-rdf - 7.5 + 7.6-SNAPSHOT org.dspace dspace-iiif - 7.5 + 7.6-SNAPSHOT org.dspace dspace-server-webapp - 7.5 + 7.6-SNAPSHOT jar classes org.dspace dspace-server-webapp - 7.5 + 7.6-SNAPSHOT war @@ -1932,7 +1932,7 @@ scm:git:git@github.com:DSpace/DSpace.git scm:git:git@github.com:DSpace/DSpace.git git@github.com:DSpace/DSpace.git - dspace-7.5 + HEAD From 358e2b5b4117cddd9bee1f237ddcf7b2048eba91 Mon Sep 17 00:00:00 2001 From: nwoodward Date: Fri, 17 Feb 2023 10:21:34 -0600 Subject: [PATCH 028/686] restores member variables used in sharding functions --- .../statistics/SolrLoggerServiceImpl.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index a9525203854e..773badc41c58 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -17,9 +17,12 @@ import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; +import java.net.URI; import java.net.URLEncoder; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.nio.file.Paths; import java.sql.SQLException; import java.text.DateFormat; import java.text.ParseException; @@ -174,6 +177,23 @@ protected SolrLoggerServiceImpl() { @Override public void afterPropertiesSet() throws Exception { + statisticsCoreURL = configurationService.getProperty("solr-statistics.server"); + + if (null != statisticsCoreURL) { + Path statisticsPath = Paths.get(new URI(statisticsCoreURL).getPath()); + statisticsCoreBase = statisticsPath + .getName(statisticsPath.getNameCount() - 1) + .toString(); + } else { + log.warn("Unable to find solr-statistics.server parameter in DSpace configuration. This is required for " + + "sharding statistics."); + statisticsCoreBase = null; + } + + log.info("solr-statistics.server: {}", statisticsCoreURL); + log.info("usage-statistics.dbfile: {}", + configurationService.getProperty("usage-statistics.dbfile")); + solr = solrStatisticsCore.getSolr(); // Read in the file so we don't have to do it all the time From b0567aa22490d273fe8c607e1d29ca1c5e278cb5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Feb 2023 06:56:19 +0000 Subject: [PATCH 029/686] Bump commons-fileupload from 1.3.3 to 1.5 Bumps commons-fileupload from 1.3.3 to 1.5. --- updated-dependencies: - dependency-name: commons-fileupload:commons-fileupload dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 00463e300791..3d9e6851d1d0 100644 --- a/pom.xml +++ b/pom.xml @@ -1484,7 +1484,7 @@ commons-fileupload commons-fileupload - 1.3.3 + 1.5 commons-io From 1acdc55104941d7fd12e787fee37bd54ba6bfdda Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Thu, 12 Jan 2023 16:44:20 -0500 Subject: [PATCH 030/686] Improve ResourcePolicy documentation. --- .../org/dspace/authorize/ResourcePolicy.java | 7 ++ .../org/dspace/authorize/package-info.java | 74 +++++++++++++++++++ .../java/org/dspace/authorize/package.html | 68 ----------------- .../service/ResourcePolicyService.java | 28 +++++-- 4 files changed, 104 insertions(+), 73 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/authorize/package-info.java delete mode 100644 dspace-api/src/main/java/org/dspace/authorize/package.html diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java index 954bb9699038..38b6aef45bc2 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java @@ -41,9 +41,16 @@ @Entity @Table(name = "resourcepolicy") public class ResourcePolicy implements ReloadableEntity { + /** This policy was set on submission, to give the submitter access. */ public static String TYPE_SUBMISSION = "TYPE_SUBMISSION"; + + /** This policy was set to allow access by a workflow group. */ public static String TYPE_WORKFLOW = "TYPE_WORKFLOW"; + + /** This policy was explicitly set on this object. */ public static String TYPE_CUSTOM = "TYPE_CUSTOM"; + + /** This policy was copied from the containing object's default policies. */ public static String TYPE_INHERITED = "TYPE_INHERITED"; @Id diff --git a/dspace-api/src/main/java/org/dspace/authorize/package-info.java b/dspace-api/src/main/java/org/dspace/authorize/package-info.java new file mode 100644 index 000000000000..df608abc1843 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/package-info.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +/** + * Represents permissions for access to DSpace content. + * + *

+ * <h2>Philosophy</h2>
+ * DSpace's authorization system follows the classical "police state"
+ * philosophy of security - the user can do nothing, unless it is
+ * specifically allowed. Those permissions are spelled out with
+ * {@link ResourcePolicy} objects, stored in the {@code resourcepolicy} table
+ * in the database.
+ *
+ * <h2>Policies are attached to Content</h2>
+ * Resource Policies get assigned to all of the content objects in
+ * DSpace - collections, communities, items, bundles, and bitstreams.
+ * (Currently they are not attached to non-content objects such as
+ * {@code EPerson} or {@code Group}. But they could be, hence the name
+ * {@code ResourcePolicy} instead of {@code ContentPolicy}.)
+ *
+ * <h2>Policies are tuples</h2>
+ * Authorization is based on evaluating the tuple of (object, action, actor),
+ * such as (ITEM, READ, EPerson John Smith) to check if the {@code EPerson}
+ * "John Smith" can read an item. {@code ResourcePolicy} objects are pretty
+ * simple, describing a single instance of (object, action, actor). If multiple
+ * actors are desired, such as groups 10, 11, and 12 are allowed to READ Item
+ * 13, you simply create a {@code ResourcePolicy} for each group.
+ *
+ * <h2>Built-in groups</h2>
+ * The install process should create two built-in groups - {@code Anonymous} for
+ * anonymous/public access, and {@code Administrators} for administrators.
+ * Group {@code Anonymous} allows anyone access, even if not authenticated.
+ * Group {@code Administrators}' members have super-user rights,
+ * and are allowed to do any action to any object.
+ *
+ * <h2>Policy types</h2>
+ * Policies have a "type" used to distinguish policies which are applied for
+ * specific purposes.
+ * <dl>
+ *   <dt>CUSTOM</dt>
+ *   <dd>These are created and assigned explicitly by users.</dd>
+ *   <dt>INHERITED</dt>
+ *   <dd>These are copied from a containing object's default policies.</dd>
+ *   <dt>SUBMISSION</dt>
+ *   <dd>These are applied during submission to give the submitter access while
+ *   composing a submission.</dd>
+ *   <dt>WORKFLOW</dt>
+ *   <dd>These are automatically applied during workflow, to give curators access
+ *   to submissions in their curation queues. They usually have an
+ *   automatically-created workflow group as the actor.</dd>
+ * </dl>
+ *
+ * <h2>Start and End dates</h2>
+ * A policy may have a start date and/or an end date. The policy is considered
+ * not valid before the start date or after the end date. No date means do not
+ * apply the related test. For example, embargo until a given date can be
+ * expressed by a READ policy with a given start date, and a limited-time offer
+ * by a READ policy with a given end date.
+ *
+ * <h2>Unused ResourcePolicy attributes</h2>
+ * {@code ResourcePolicy} has a few attributes that are currently unused, + * but are included with the intent that they will be used someday. + * One is the {@code EPerson} - policies could apply to only a single EPerson, + * but for ease of administration currently a Group is the recommended unit to + * use to describe the actor. + * + * @author dstuve + * @author mwood + */ +package org.dspace.authorize; diff --git a/dspace-api/src/main/java/org/dspace/authorize/package.html b/dspace-api/src/main/java/org/dspace/authorize/package.html deleted file mode 100644 index 66ce0f824773..000000000000 --- a/dspace-api/src/main/java/org/dspace/authorize/package.html +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - -

-Handles permissions for DSpace content.
-
-<h2>Philosophy</h2>
-DSpace's authorization system follows the classical "police state"
-philosophy of security - the user can do nothing, unless it is
-specifically allowed. Those permissions are spelled out with
-ResourcePolicy objects, stored in the resourcepolicy table in the
-database.
-
-<h2>Policies are attached to Content</h2>
-
-<h2>Policies are attached to Content</h2>
-Resource Policies get assigned to all of the content objects in
-DSpace - collections, communities, items, bundles, and bitstreams.
-(Currently they are not attached to non-content objects such as EPerson
-or Group. But they could be, hence the name ResourcePolicy instead of
-ContentPolicy.)
-
-<h2>Policies are tuples</h2>
-Authorization is based on evaluating the tuple of (object, action, who),
-such as (ITEM, READ, EPerson John Smith) to check if the EPerson "John Smith"
-can read an item. ResourcePolicy objects are pretty simple, describing a single instance of
-(object, action, who). If multiple who's are desired, such as Groups 10, 11, and
-12 are allowed to READ Item 13, you simply create a ResourcePolicy for each
-group.
-
-<h2>Special Groups</h2>
-The install process should create two special groups - group 0, for
-anonymous/public access, and group 1 for administrators.
-Group 0 (public/anonymous) allows anyone access, even if they are not
-authenticated. Group 1's (admin) members have super-user rights, and
-are allowed to do any action to any object.
-
-<h2>Unused ResourcePolicy attributes</h2>
-ResourcePolicies have a few attributes that are currently unused,
-but are included with the intent that they will be used someday.
-One is start and end dates, for when policies will be active, so that
-permissions for content can change over time. The other is the EPerson -
-policies could apply to only a single EPerson, but for ease of
-administration currently a Group is the recommended unit to use to
-describe 'who'.
-
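The tuple model described in the documentation above, (object, action, actor) plus
optional start and end dates, is exercised through DSpace's AuthorizeService. The
following is a minimal sketch for illustration only and is not code from these
patches; the class name, method name, and the pre-existing Context and Item it
assumes are placeholders.

    import java.sql.SQLException;

    import org.dspace.authorize.AuthorizeException;
    import org.dspace.authorize.factory.AuthorizeServiceFactory;
    import org.dspace.authorize.service.AuthorizeService;
    import org.dspace.content.Item;
    import org.dspace.core.Constants;
    import org.dspace.core.Context;
    import org.dspace.eperson.Group;
    import org.dspace.eperson.factory.EPersonServiceFactory;
    import org.dspace.eperson.service.GroupService;

    public class ResourcePolicyExample {

        /**
         * Grant the built-in Anonymous group READ access to an item, then verify
         * that the (item, READ, Anonymous) tuple authorizes the current session.
         */
        public static void grantAnonymousRead(Context context, Item item)
                throws SQLException, AuthorizeException {
            AuthorizeService authorizeService =
                    AuthorizeServiceFactory.getInstance().getAuthorizeService();
            GroupService groupService =
                    EPersonServiceFactory.getInstance().getGroupService();

            // Actor: the built-in Anonymous group created at install time.
            Group anonymous = groupService.findByName(context, Group.ANONYMOUS);

            // Create the (item, READ, Anonymous) policy on the item.
            authorizeService.addPolicy(context, item, Constants.READ, anonymous);

            // Throws AuthorizeException if the tuple does not authorize the action.
            authorizeService.authorizeAction(context, item, Constants.READ);
        }
    }

An embargo, as described under "Start and End dates", would additionally set a start
date on the resulting ResourcePolicy (via ResourcePolicy#setStartDate), so that the
grant only becomes valid from that date onward.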
- - - diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java index f1d8b30242a7..726078d74382 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java @@ -53,12 +53,19 @@ public List find(Context c, EPerson e, List groups, int a throws SQLException; /** - * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID. - * This method can be used to detect duplicate ResourcePolicies. + * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring + * IDs with a specific PolicyID. This method can be used to detect duplicate + * ResourcePolicies. * - * @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies. - * @return List of resource policies for the same DSpaceObject, group and action but other policyID. - * @throws SQLException + * @param context current DSpace session. + * @param dso find policies for this object. + * @param group find policies referring to this group. + * @param action find policies for this action. + * @param notPolicyID ResourcePolicies with this ID will be ignored while + * looking out for equal ResourcePolicies. + * @return List of resource policies for the same DSpaceObject, group and + * action but other policyID. + * @throws SQLException passed through. */ public List findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) @@ -68,6 +75,16 @@ public List findByTypeGroupActionExceptId(Context context, DSpac public boolean isDateValid(ResourcePolicy resourcePolicy); + /** + * Create and persist a copy of a given ResourcePolicy, with an empty + * dSpaceObject field. + * + * @param context current DSpace session. + * @param resourcePolicy the policy to be copied. + * @return the copy. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + */ public ResourcePolicy clone(Context context, ResourcePolicy resourcePolicy) throws SQLException, AuthorizeException; public void removeAllPolicies(Context c, DSpaceObject o) throws SQLException, AuthorizeException; @@ -117,6 +134,7 @@ public List findExceptRpType(Context c, DSpaceObject o, int acti * @param ePerson ePerson whose policies want to find * @param offset the position of the first result to return * @param limit paging limit + * @return some of the policies referring to {@code ePerson}. 
* @throws SQLException if database error */ public List findByEPerson(Context context, EPerson ePerson, int offset, int limit) From 47fab88c17d8ac821f99c0d597cc6e8d3c153a8a Mon Sep 17 00:00:00 2001 From: Kristof De Langhe Date: Fri, 24 Feb 2023 17:30:28 +0100 Subject: [PATCH 031/686] 89779: VersionedHandleIdentifierProviderWithCanonicalHandles fix pt1 --- .../spring/spring-dspace-core-services.xml | 15 --- ...VersionedHandleIdentifierProviderTest.java | 100 ++++++++++++++++++ .../config/spring/api/identifier-service.xml | 2 - 3 files changed, 100 insertions(+), 17 deletions(-) create mode 100644 dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java diff --git a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml index 87bfcbc86c98..3ce641d99c34 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml @@ -13,15 +13,6 @@ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd"> - - - - @@ -31,12 +22,6 @@ - - - - diff --git a/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java new file mode 100644 index 000000000000..9db55bb31234 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java @@ -0,0 +1,100 @@ +package org.dspace.identifier; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.VersionBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.identifier.service.IdentifierService; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class VersionedHandleIdentifierProviderTest extends AbstractIntegrationTestWithDatabase { + private ServiceManager serviceManager; + + private String handlePrefix; + + private Collection collection; + private Item itemV1; + private Item itemV2; + private Item itemV3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + + ConfigurationService configurationService = new DSpace().getConfigurationService(); + handlePrefix = configurationService.getProperty("handle.prefix"); + + serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + } + + private void registerProvider(Class type) { + // Register our new provider + serviceManager.registerServiceClass(type.getName(), type); + IdentifierProvider identifierProvider = + (IdentifierProvider) 
serviceManager.getServiceByName(type.getName(), type); + + // Overwrite the identifier-service's providers with the new one to ensure only this provider is used + IdentifierServiceImpl identifierService = serviceManager.getServicesByType(IdentifierServiceImpl.class).get(0); + identifierService.setProviders(List.of(identifierProvider)); + } + + private void createVersions() throws SQLException, AuthorizeException { + itemV1 = ItemBuilder.createItem(context, collection) + .withTitle("First version") + .build(); + itemV2 = VersionBuilder.createVersion(context, itemV1, "Second version").build().getItem(); + itemV3 = VersionBuilder.createVersion(context, itemV1, "Third version").build().getItem(); + } + + @Test + public void testDefaultVersionedHandleProvider() throws Exception { + registerProvider(VersionedHandleIdentifierProvider.class); + createVersions(); + + assertEquals(handlePrefix + "/1", itemV1.getHandle()); + assertEquals(handlePrefix + "/1.2", itemV2.getHandle()); + assertEquals(handlePrefix + "/1.3", itemV3.getHandle()); + } + + @Test + public void testCanonicalVersionedHandleProvider() throws Exception { + registerProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class); + createVersions(); + + assertEquals(handlePrefix + "/1.3", itemV1.getHandle()); + assertEquals(handlePrefix + "/1.2", itemV2.getHandle()); + assertEquals(handlePrefix + "/1", itemV3.getHandle()); + } + + @After + @Override + public void destroy() throws Exception { + super.destroy(); + // serviceManager.getApplicationContext().refresh(); + } +} diff --git a/dspace/config/spring/api/identifier-service.xml b/dspace/config/spring/api/identifier-service.xml index e9f08003bd63..dd5716a62e9b 100644 --- a/dspace/config/spring/api/identifier-service.xml +++ b/dspace/config/spring/api/identifier-service.xml @@ -17,11 +17,9 @@ The VersionedHandleIdentifierProvider creates a new versioned handle for every new version. --> - @ - - org.dspace.app.rest.Application - - org.apache.maven.plugins - maven-war-plugin - - true - - true - - - - prepare-package - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - - test-jar - - - - - - - com.mycila - license-maven-plugin - - - **/src/test/resources/** - **/src/test/data/** - - src/main/webapp/index.html - src/main/webapp/login.html - src/main/webapp/styles.css - src/main/webapp/js/hal/** - src/main/webapp/js/vendor/** - - - + + org.dspace + dspace-parent + cris-2022.03.01-SNAPSHOT + .. + + + + + ${basedir}/.. 
+ + @ + + + + + + org.dspace.modules + additions + + + org.dspace + dspace-server-webapp + + + org.apache.solr + solr-solrj + + + + + org.dspace + dspace-api + test-jar + test + + + org.dspace + dspace-server-webapp + test-jar + test + + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework.security + spring-security-test + ${spring-security.version} + test + + + com.jayway.jsonpath + json-path-assert + ${json-path.version} + test + + + junit + junit + test + + + com.h2database + h2 + test + + + org.mockito + mockito-inline + test + + + + + org.apache.solr + solr-core + ${solr.client.version} + test + + + + org.apache.commons + commons-text + + + + + org.apache.lucene + lucene-analyzers-icu + test + + + + + + + + + com.mycila + license-maven-plugin + + + **/src/test/resources/** + **/src/test/data/** + + src/main/resources/static/index.html + src/main/resources/static/login.html + src/main/resources/static/styles.css + src/main/resources/static/js/hal/** + src/main/resources/static/js/vendor/** + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java b/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java new file mode 100644 index 000000000000..90039887f862 --- /dev/null +++ b/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app; + +import org.dspace.app.rest.WebApplication; +import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; +import org.dspace.app.rest.utils.DSpaceKernelInitializer; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; + +@SpringBootApplication(scanBasePackageClasses = WebApplication.class) +public class Application extends SpringBootServletInitializer { + + public static void main(String[] args) { + new SpringApplicationBuilder(Application.class) + .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer()) + .run(args); + } + + /** + * Override the default SpringBootServletInitializer.configure() method, + * passing it this Application class. + *

+ * This is necessary to allow us to build a deployable WAR, rather than
+ * always relying on embedded Tomcat.
+ * <p>

+ * See: http://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#howto-create-a-deployable-war-file + * + * @param application + * @return + */ + @Override + protected SpringApplicationBuilder configure(SpringApplicationBuilder application) { + // Pass this Application class, and our initializers for DSpace Kernel and Configuration + // NOTE: Kernel must be initialized before Configuration + return application.sources(Application.class) + .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer()); + } +} diff --git a/dspace-server-webapp/src/main/resources/application.properties b/dspace-webapp-boot/src/main/resources/application.properties similarity index 99% rename from dspace-server-webapp/src/main/resources/application.properties rename to dspace-webapp-boot/src/main/resources/application.properties index a10e0f98a00d..8dd8e2903cc6 100644 --- a/dspace-server-webapp/src/main/resources/application.properties +++ b/dspace-webapp-boot/src/main/resources/application.properties @@ -27,6 +27,7 @@ # http://docs.spring.io/spring-boot/docs/current/reference/html/common-application-properties.html # +server.servlet.context-path=/server ######################## # DSpace Settings # diff --git a/dspace-server-webapp/src/main/webapp/index.html b/dspace-webapp-boot/src/main/resources/static/index.html similarity index 100% rename from dspace-server-webapp/src/main/webapp/index.html rename to dspace-webapp-boot/src/main/resources/static/index.html diff --git a/dspace-server-webapp/src/main/webapp/js/hal/http/client.js b/dspace-webapp-boot/src/main/resources/static/js/hal/http/client.js similarity index 100% rename from dspace-server-webapp/src/main/webapp/js/hal/http/client.js rename to dspace-webapp-boot/src/main/resources/static/js/hal/http/client.js diff --git a/dspace-server-webapp/src/main/webapp/js/vendor/CustomPostForm.js b/dspace-webapp-boot/src/main/resources/static/js/vendor/CustomPostForm.js similarity index 100% rename from dspace-server-webapp/src/main/webapp/js/vendor/CustomPostForm.js rename to dspace-webapp-boot/src/main/resources/static/js/vendor/CustomPostForm.js diff --git a/dspace-server-webapp/src/main/webapp/login.html b/dspace-webapp-boot/src/main/resources/static/login.html similarity index 100% rename from dspace-server-webapp/src/main/webapp/login.html rename to dspace-webapp-boot/src/main/resources/static/login.html diff --git a/dspace-server-webapp/src/main/webapp/styles.css b/dspace-webapp-boot/src/main/resources/static/styles.css similarity index 100% rename from dspace-server-webapp/src/main/webapp/styles.css rename to dspace-webapp-boot/src/main/resources/static/styles.css diff --git a/dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleController.java b/dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleController.java similarity index 100% rename from dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleController.java rename to dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleController.java diff --git a/dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java b/dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java similarity index 100% rename from dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java rename to dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java diff --git a/dspace/config/log4j2-console.xml 
b/dspace/config/log4j2-console.xml index 3d51b123367b..a0322abf19d3 100644 --- a/dspace/config/log4j2-console.xml +++ b/dspace/config/log4j2-console.xml @@ -25,7 +25,7 @@ For command line / Ant scripts, we are only concerned about significant warnings/errors. For the full detail, change this to INFO and re-run Ant. --> - + diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml deleted file mode 100644 index f890d39e6992..000000000000 --- a/dspace/modules/server/pom.xml +++ /dev/null @@ -1,355 +0,0 @@ - - 4.0.0 - org.dspace.modules - server - war - DSpace Server Webapp:: Local Customizations - Overlay customizations. -This is probably a temporary solution to the build problems. We like to investigate about -the possibility to remove the overlays enable a more flexible extension mechanism. -The use of web-fragment and spring mvc technology allow us to add request handlers -just adding new jar in the classloader - - - modules - org.dspace - cris-2022.03.01-SNAPSHOT - .. - - - - - ${basedir}/../../.. - - - - - - - org.apache.maven.plugins - maven-dependency-plugin - - - unpack - prepare-package - - unpack-dependencies - - - org.dspace.modules - additions - - ${project.build.directory}/additions - META-INF/** - - - - - - org.apache.maven.plugins - maven-war-plugin - - false - - true - - - - ${project.build.directory}/additions - WEB-INF/classes - - - - - - prepare-package - - - - - - org.codehaus.gmaven - groovy-maven-plugin - - - setproperty - initialize - - execute - - - - project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/'); - log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']); - - - - - - - - - - - - unit-test-environment - - false - - skipUnitTests - false - - - - - - - maven-dependency-plugin - - ${project.build.directory}/testing - - - org.dspace - dspace-parent - ${project.version} - zip - testEnvironment - - - - - - setupUnitTestEnvironment - generate-test-resources - - unpack - - - - - - - - maven-surefire-plugin - - - - - - ${agnostic.build.dir}/testing/dspace - - true - ${agnostic.build.dir}/testing/dspace/solr/ - - - - - - - - - org.dspace - dspace-server-webapp - test-jar - test - - - - - - - integration-test-environment - - false - - skipIntegrationTests - false - - - - - - - maven-dependency-plugin - - ${project.build.directory}/testing - - - org.dspace - dspace-parent - ${project.version} - zip - testEnvironment - - - - - - setupIntegrationTestEnvironment - pre-integration-test - - unpack - - - - - - - - maven-failsafe-plugin - - - - - ${agnostic.build.dir}/testing/dspace - - true - ${agnostic.build.dir}/testing/dspace/solr/ - - - - - - - - - org.dspace - dspace-server-webapp - test-jar - test - - - - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - - - - - org.dspace.modules - additions - - - org.dspace - dspace-server-webapp - classes - - - org.dspace - dspace-server-webapp - war - - - org.apache.solr - solr-solrj - ${solr.client.version} - - - - - org.dspace - dspace-api - test-jar - test - - - org.dspace - dspace-server-webapp - test-jar - test - - - org.springframework.boot - spring-boot-starter-test - test - - - org.springframework.security - spring-security-test - ${spring-security.version} - test - - - com.jayway.jsonpath - json-path-assert - ${json-path.version} - test - - - junit - junit - test - - - com.h2database - h2 - test - - - org.mockito - mockito-inline - test - - - - - org.apache.solr - solr-core - 
${solr.client.version} - test - - - - org.apache.commons - commons-text - - - - - org.apache.lucene - lucene-analyzers-icu - test - - - - - diff --git a/dspace/modules/server/src/main/webapp/.gitignore b/dspace/modules/server/src/main/webapp/.gitignore deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/pom.xml b/pom.xml index d30aa94e2a2c..6a0ed9272cb2 100644 --- a/pom.xml +++ b/pom.xml @@ -798,6 +798,21 @@ + + + dspace-webapp-boot + + + dspace-webapp-boot/pom.xml + + + + dspace-webapp-boot + + + @@ -1133,32 +1148,24 @@ org.dspace dspace-server-webapp - test-jar - cris-2022.03.01-SNAPSHOT - test - - - org.dspace - dspace-rdf cris-2022.03.01-SNAPSHOT org.dspace - dspace-iiif + dspace-server-webapp + test-jar cris-2022.03.01-SNAPSHOT + test org.dspace - dspace-server-webapp + dspace-rdf cris-2022.03.01-SNAPSHOT - jar - classes org.dspace - dspace-server-webapp + dspace-iiif cris-2022.03.01-SNAPSHOT - war From e9ed6d2d6327f1ce8aab8df6f498a462b37e6f56 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 10 Mar 2023 15:32:59 +0100 Subject: [PATCH 043/686] [DSC-963] Fixed test configuration --- .../src/test/resources/application.properties | 64 +++++++++++++++++++ .../src/main/resources/application.properties | 7 +- 2 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 dspace-server-webapp/src/test/resources/application.properties diff --git a/dspace-server-webapp/src/test/resources/application.properties b/dspace-server-webapp/src/test/resources/application.properties new file mode 100644 index 000000000000..9b408d9612de --- /dev/null +++ b/dspace-server-webapp/src/test/resources/application.properties @@ -0,0 +1,64 @@ +# +# The contents of this file are subject to the license and copyright +# detailed in the LICENSE and NOTICE files at the root of the source +# tree and available online at +# +# http://www.dspace.org/license/ +# + +# Spring Boot's Test application.properties + +######################## +# Jackson serialization settings +# +spring.jackson.serialization.fail-on-empty-beans=false + +######################## +# Internationalization +# +# Base Path for our messages file (i18n) +spring.messages.basename=i18n/messages +spring.messages.encoding=UTF-8 + +######################## +# URI Encoding and Decoding +# +# +# Charset of HTTP requests and responses. Added to the "Content-Type" header if not set explicitly. +server.servlet.encoding.charset=UTF-8 +# Force the encoding to the configured charset on HTTP requests and responses. +server.servlet.encoding.force=true + +########################### +# Server Properties +# +# Error handling settings +server.error.include-stacktrace = never + +# When to include the error message in error responses (introduced in Spring 2.3.x) +server.error.include-message = always + +# Spring Boot proxy configuration (can be overridden in local.cfg). +server.forward-headers-strategy=FRAMEWORK + +###################### +# Cache Properties +# Added for IIIF cache support. +# Path to configuration file. 
+spring.cache.jcache.config=classpath:iiif/cache/ehcache.xml + +###################### +# Spring Boot Autoconfigure +# +# TODO: At some point we may want to investigate whether we can re-enable these and remove the custom DSpace init code +spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration, \ + org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration, \ + org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration, \ + org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration, \ + org.springframework.boot.autoconfigure.velocity.VelocityAutoConfiguration + +spring.main.allow-bean-definition-overriding = true + +######################### +# Spring Boot Logging levels +logging.config = classpath:log4j2-test.xml diff --git a/dspace-webapp-boot/src/main/resources/application.properties b/dspace-webapp-boot/src/main/resources/application.properties index 8dd8e2903cc6..0c26d530b74c 100644 --- a/dspace-webapp-boot/src/main/resources/application.properties +++ b/dspace-webapp-boot/src/main/resources/application.properties @@ -27,7 +27,6 @@ # http://docs.spring.io/spring-boot/docs/current/reference/html/common-application-properties.html # -server.servlet.context-path=/server ######################## # DSpace Settings # @@ -38,6 +37,12 @@ server.servlet.context-path=/server # NOTE: this configuration is filled out by Apache Ant during the DSpace install/update process. It does NOT # interact with or read its configuration from dspace.cfg. dspace.dir=${dspace.dir} + +######################## +# Servlet context path configuration for spring boot application running with embedded tomcat +# +server.servlet.context-path=/server + ######################## # Jackson serialization settings # From 98ef4e560f9625df842cd4a5d442c9b852eff503 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 10 Mar 2023 15:37:57 +0100 Subject: [PATCH 044/686] [DSC-963] Fixed dspace pom --- dspace/pom.xml | 6 +++++- pom.xml | 5 +++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/dspace/pom.xml b/dspace/pom.xml index 47ef9c46475a..e3e05f0c096b 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -217,7 +217,11 @@ org.dspace dspace-server-webapp - war + compile + + + org.dspace + dspace-webapp-boot compile diff --git a/pom.xml b/pom.xml index 6a0ed9272cb2..fd6542421c77 100644 --- a/pom.xml +++ b/pom.xml @@ -1157,6 +1157,11 @@ cris-2022.03.01-SNAPSHOT test + + org.dspace + dspace-webapp-boot + cris-2022.03.01-SNAPSHOT + org.dspace dspace-rdf From baeab16708a01f0c6a2d32886040292288174190 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 10 Mar 2023 16:14:50 +0100 Subject: [PATCH 045/686] [DSC-963] Improved tests configuration --- .../src/main/resources/application.properties | 0 .../src/test/resources/application.properties | 64 ------------------- 2 files changed, 64 deletions(-) rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/application.properties (100%) delete mode 100644 dspace-server-webapp/src/test/resources/application.properties diff --git a/dspace-webapp-boot/src/main/resources/application.properties b/dspace-server-webapp/src/main/resources/application.properties similarity index 100% rename from dspace-webapp-boot/src/main/resources/application.properties rename to dspace-server-webapp/src/main/resources/application.properties diff --git a/dspace-server-webapp/src/test/resources/application.properties b/dspace-server-webapp/src/test/resources/application.properties deleted file 
mode 100644 index 9b408d9612de..000000000000 --- a/dspace-server-webapp/src/test/resources/application.properties +++ /dev/null @@ -1,64 +0,0 @@ -# -# The contents of this file are subject to the license and copyright -# detailed in the LICENSE and NOTICE files at the root of the source -# tree and available online at -# -# http://www.dspace.org/license/ -# - -# Spring Boot's Test application.properties - -######################## -# Jackson serialization settings -# -spring.jackson.serialization.fail-on-empty-beans=false - -######################## -# Internationalization -# -# Base Path for our messages file (i18n) -spring.messages.basename=i18n/messages -spring.messages.encoding=UTF-8 - -######################## -# URI Encoding and Decoding -# -# -# Charset of HTTP requests and responses. Added to the "Content-Type" header if not set explicitly. -server.servlet.encoding.charset=UTF-8 -# Force the encoding to the configured charset on HTTP requests and responses. -server.servlet.encoding.force=true - -########################### -# Server Properties -# -# Error handling settings -server.error.include-stacktrace = never - -# When to include the error message in error responses (introduced in Spring 2.3.x) -server.error.include-message = always - -# Spring Boot proxy configuration (can be overridden in local.cfg). -server.forward-headers-strategy=FRAMEWORK - -###################### -# Cache Properties -# Added for IIIF cache support. -# Path to configuration file. -spring.cache.jcache.config=classpath:iiif/cache/ehcache.xml - -###################### -# Spring Boot Autoconfigure -# -# TODO: At some point we may want to investigate whether we can re-enable these and remove the custom DSpace init code -spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration, \ - org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration, \ - org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration, \ - org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration, \ - org.springframework.boot.autoconfigure.velocity.VelocityAutoConfiguration - -spring.main.allow-bean-definition-overriding = true - -######################### -# Spring Boot Logging levels -logging.config = classpath:log4j2-test.xml From 05b6251469d5873a7bda8edc27cf81b17d22e2de Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 10 Mar 2023 16:42:00 +0100 Subject: [PATCH 046/686] [DSC-963] Fixed dspace-server-webapp pom --- dspace-server-webapp/pom.xml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index e6423a7bc8bc..fa607629e7d8 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -57,6 +57,25 @@ + + org.apache.maven.plugins + maven-jar-plugin + + + + true + true + + + + + + + test-jar + + + + From ab2ff11216eb38598e9fbe32ada6a496e2633f8f Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 10 Mar 2023 17:39:33 +0100 Subject: [PATCH 047/686] [DSC-963] Fixed Sword tests --- .../src/test/java/org/dspace/app/rdf/RdfIT.java | 2 +- .../src/test/java/org/dspace/app/sword/Swordv1IT.java | 2 +- .../src/test/java/org/dspace/app/sword2/Swordv2IT.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java index 85ab3dcadd78..10f06370ad5c 100644 --- 
a/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java @@ -47,7 +47,7 @@ */ // Ensure the RDF endpoint IS ENABLED before any tests run. // This annotation overrides default DSpace config settings loaded into Spring Context -@TestPropertySource(properties = {"rdf.enabled = true"}) +@TestPropertySource(properties = {"rdf.enabled = true", "server.servlet.context-path = /"}) public class RdfIT extends AbstractWebClientIntegrationTest { @Autowired diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java b/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java index 24244e1773e6..ffef89316b93 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java @@ -34,7 +34,7 @@ */ // Ensure the SWORD SERVER IS ENABLED before any tests run. // This annotation overrides default DSpace config settings loaded into Spring Context -@TestPropertySource(properties = {"sword-server.enabled = true"}) +@TestPropertySource(properties = { "sword-server.enabled = true", "server.servlet.context-path = /" }) public class Swordv1IT extends AbstractWebClientIntegrationTest { @Autowired diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java b/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java index 95ec76251415..f9caeead664e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java @@ -34,7 +34,7 @@ */ // Ensure the SWORDv2 SERVER IS ENABLED before any tests run. // This annotation overrides default DSpace config settings loaded into Spring Context -@TestPropertySource(properties = {"swordv2-server.enabled = true"}) +@TestPropertySource(properties = {"swordv2-server.enabled = true", "server.servlet.context-path = /"}) public class Swordv2IT extends AbstractWebClientIntegrationTest { @Autowired From 819bf788081dbc69c82ce050ba64aeb3a964415e Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 10 Mar 2023 17:50:08 +0100 Subject: [PATCH 048/686] [DSC-963] Fixed dspace pom --- dspace/pom.xml | 1 + pom.xml | 13 +++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/dspace/pom.xml b/dspace/pom.xml index e3e05f0c096b..0dba032e688b 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -222,6 +222,7 @@ org.dspace dspace-webapp-boot + war compile diff --git a/pom.xml b/pom.xml index fd6542421c77..e17dfbf384c0 100644 --- a/pom.xml +++ b/pom.xml @@ -1148,29 +1148,30 @@ org.dspace dspace-server-webapp + test-jar cris-2022.03.01-SNAPSHOT + test org.dspace - dspace-server-webapp - test-jar + dspace-rdf cris-2022.03.01-SNAPSHOT - test org.dspace - dspace-webapp-boot + dspace-iiif cris-2022.03.01-SNAPSHOT org.dspace - dspace-rdf + dspace-server-webapp cris-2022.03.01-SNAPSHOT org.dspace - dspace-iiif + dspace-webapp-boot cris-2022.03.01-SNAPSHOT + war From 382105dfdae418eb6a238f1bfa9d968c82a58727 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 10 Mar 2023 19:32:46 +0100 Subject: [PATCH 049/686] [DSC-963] Set default servlet context path on application-test.properties --- .../src/test/java/org/dspace/app/rdf/RdfIT.java | 2 +- .../src/test/java/org/dspace/app/sword/Swordv1IT.java | 2 +- .../src/test/java/org/dspace/app/sword2/Swordv2IT.java | 2 +- .../src/test/resources/application-test.properties | 4 +++- 4 files changed, 6 
insertions(+), 4 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java index 10f06370ad5c..85ab3dcadd78 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java @@ -47,7 +47,7 @@ */ // Ensure the RDF endpoint IS ENABLED before any tests run. // This annotation overrides default DSpace config settings loaded into Spring Context -@TestPropertySource(properties = {"rdf.enabled = true", "server.servlet.context-path = /"}) +@TestPropertySource(properties = {"rdf.enabled = true"}) public class RdfIT extends AbstractWebClientIntegrationTest { @Autowired diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java b/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java index ffef89316b93..24244e1773e6 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java @@ -34,7 +34,7 @@ */ // Ensure the SWORD SERVER IS ENABLED before any tests run. // This annotation overrides default DSpace config settings loaded into Spring Context -@TestPropertySource(properties = { "sword-server.enabled = true", "server.servlet.context-path = /" }) +@TestPropertySource(properties = {"sword-server.enabled = true"}) public class Swordv1IT extends AbstractWebClientIntegrationTest { @Autowired diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java b/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java index f9caeead664e..95ec76251415 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java @@ -34,7 +34,7 @@ */ // Ensure the SWORDv2 SERVER IS ENABLED before any tests run. 
// This annotation overrides default DSpace config settings loaded into Spring Context -@TestPropertySource(properties = {"swordv2-server.enabled = true", "server.servlet.context-path = /"}) +@TestPropertySource(properties = {"swordv2-server.enabled = true"}) public class Swordv2IT extends AbstractWebClientIntegrationTest { @Autowired diff --git a/dspace-server-webapp/src/test/resources/application-test.properties b/dspace-server-webapp/src/test/resources/application-test.properties index 9a396cf8e5b1..e92e1166e355 100644 --- a/dspace-server-webapp/src/test/resources/application-test.properties +++ b/dspace-server-webapp/src/test/resources/application-test.properties @@ -14,4 +14,6 @@ ## Log4j2 configuration for test environment ## This file is found on classpath at src/test/resources/log4j2-test.xml -logging.config = classpath:log4j2-test.xml \ No newline at end of file +logging.config = classpath:log4j2-test.xml + +server.servlet.context-path=/ \ No newline at end of file From 7524053a5c10071b99edf453a112ecc30f06779f Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 10 Mar 2023 20:41:07 +0100 Subject: [PATCH 050/686] [DSC-963] Improved TestApplication configuration --- .../test/java/org/dspace/app/{ => rest}/TestApplication.java | 5 ++--- .../app/rest/test/AbstractControllerIntegrationTest.java | 2 +- .../app/rest/test/AbstractWebClientIntegrationTest.java | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) rename dspace-server-webapp/src/test/java/org/dspace/app/{ => rest}/TestApplication.java (70%) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/TestApplication.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/TestApplication.java similarity index 70% rename from dspace-server-webapp/src/test/java/org/dspace/app/TestApplication.java rename to dspace-server-webapp/src/test/java/org/dspace/app/rest/TestApplication.java index 0f80e866edd7..e387e3f0024d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/TestApplication.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/TestApplication.java @@ -5,12 +5,11 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app; +package org.dspace.app.rest; -import org.dspace.app.rest.WebApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; -@SpringBootApplication(scanBasePackageClasses = WebApplication.class) +@SpringBootApplication public class TestApplication { } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java index 4ec66fb00081..a27e0ab75c8c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; import org.dspace.AbstractIntegrationTestWithDatabase; -import org.dspace.app.TestApplication; +import org.dspace.app.rest.TestApplication; import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; import org.dspace.app.rest.utils.DSpaceKernelInitializer; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java index 
be0a27b4ebd1..7f58a9999dd4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java @@ -9,7 +9,7 @@ import org.apache.commons.lang3.StringUtils; import org.dspace.AbstractIntegrationTestWithDatabase; -import org.dspace.app.TestApplication; +import org.dspace.app.rest.TestApplication; import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; import org.dspace.app.rest.utils.DSpaceKernelInitializer; import org.junit.runner.RunWith; From 4748163eafd04e728f5201f31b29c9c0b8d8b9e8 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Sat, 11 Mar 2023 15:49:02 +0000 Subject: [PATCH 051/686] Fix #8714 and #8715 --- .../org/dspace/discovery/IndexClient.java | 62 ++++++++++--------- .../org/dspace/discovery/IndexingUtils.java | 4 ++ .../org/dspace/discovery/SolrServiceImpl.java | 7 ++- 3 files changed, 44 insertions(+), 29 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java index fcb3e79d1d4b..661c48d91cfc 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java @@ -56,37 +56,18 @@ public void internalRun() throws Exception { * new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer"); */ - if (indexClientOptions == IndexClientOptions.REMOVE) { - handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index"); - indexer.unIndexContent(context, commandLine.getOptionValue("r")); - } else if (indexClientOptions == IndexClientOptions.CLEAN) { - handler.logInfo("Cleaning Index"); - indexer.cleanIndex(); - } else if (indexClientOptions == IndexClientOptions.DELETE) { - handler.logInfo("Deleting Index"); - indexer.deleteIndex(); - } else if (indexClientOptions == IndexClientOptions.BUILD || - indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { - handler.logInfo("(Re)building index from scratch."); - indexer.deleteIndex(); - indexer.createIndex(context); - if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { - checkRebuildSpellCheck(commandLine, indexer); - } - } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) { - handler.logInfo("Optimizing search core."); - indexer.optimize(); - } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) { - checkRebuildSpellCheck(commandLine, indexer); - } else if (indexClientOptions == IndexClientOptions.INDEX) { - final String param = commandLine.getOptionValue('i'); + Optional indexableObject = Optional.empty(); + + if (indexClientOptions == IndexClientOptions.REMOVE || indexClientOptions == IndexClientOptions.INDEX) { + final String param = indexClientOptions == IndexClientOptions.REMOVE ? 
commandLine.getOptionValue('r') : + commandLine.getOptionValue('i'); UUID uuid = null; try { uuid = UUID.fromString(param); } catch (Exception e) { - // nothing to do, it should be an handle + // nothing to do, it should be a handle } - Optional indexableObject = Optional.empty(); + if (uuid != null) { final Item item = ContentServiceFactory.getInstance().getItemService().find(context, uuid); if (item != null) { @@ -118,7 +99,32 @@ public void internalRun() throws Exception { if (!indexableObject.isPresent()) { throw new IllegalArgumentException("Cannot resolve " + param + " to a DSpace object"); } - handler.logInfo("Indexing " + param + " force " + commandLine.hasOption("f")); + } + + if (indexClientOptions == IndexClientOptions.REMOVE) { + handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index"); + indexer.unIndexContent(context, indexableObject.get().getUniqueIndexID()); + } else if (indexClientOptions == IndexClientOptions.CLEAN) { + handler.logInfo("Cleaning Index"); + indexer.cleanIndex(); + } else if (indexClientOptions == IndexClientOptions.DELETE) { + handler.logInfo("Deleting Index"); + indexer.deleteIndex(); + } else if (indexClientOptions == IndexClientOptions.BUILD || + indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { + handler.logInfo("(Re)building index from scratch."); + indexer.deleteIndex(); + indexer.createIndex(context); + if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { + checkRebuildSpellCheck(commandLine, indexer); + } + } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) { + handler.logInfo("Optimizing search core."); + indexer.optimize(); + } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) { + checkRebuildSpellCheck(commandLine, indexer); + } else if (indexClientOptions == IndexClientOptions.INDEX) { + handler.logInfo("Indexing " + commandLine.getOptionValue('i') + " force " + commandLine.hasOption("f")); final long startTimeMillis = System.currentTimeMillis(); final long count = indexAll(indexer, ContentServiceFactory.getInstance(). getItemService(), context, indexableObject.get()); @@ -179,7 +185,7 @@ private static long indexAll(final IndexingService indexingService, indexingService.indexContent(context, dso, true, true); count++; if (dso.getIndexedObject() instanceof Community) { - final Community community = (Community) dso; + final Community community = (Community) dso.getIndexedObject(); final String communityHandle = community.getHandle(); for (final Community subcommunity : community.getSubcommunities()) { count += indexAll(indexingService, itemService, context, new IndexableCommunity(subcommunity)); diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java index 8dd02f5d44e0..aa90ccf4a371 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java @@ -107,6 +107,10 @@ static List findDirectlyAuthorizedGroupAndEPersonPrefixedIds( ArrayList prefixedIds = new ArrayList<>(); for (int auth : authorizations) { for (ResourcePolicy policy : authService.getPoliciesActionFilter(context, obj, auth)) { + // Avoid NPE in cases where the policy does not have group or eperson + if (policy.getGroup() == null && policy.getEPerson() == null) { + continue; + } String prefixedId = policy.getGroup() == null ? 
"e" + policy.getEPerson().getID() : "g" + policy.getGroup().getID(); diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java index 68d3b48ec098..0cf2aa50af67 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java @@ -256,7 +256,12 @@ public void unIndexContent(Context context, String searchUniqueID, boolean commi try { if (solrSearchCore.getSolr() != null) { - indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID).delete(searchUniqueID); + IndexFactory index = indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID); + if (index != null) { + index.delete(searchUniqueID); + } else { + log.warn("Object not found in Solr index: " + searchUniqueID); + } if (commit) { solrSearchCore.getSolr().commit(); } From fa651fea6d986c7433b93e93c4bb2f531b9eefc9 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Mon, 13 Mar 2023 10:33:06 +0100 Subject: [PATCH 052/686] [DSC-963] Added @Order on AdminRestPermissionEvaluatorPlugin --- .../app/rest/security/AdminRestPermissionEvaluatorPlugin.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java index 0d251f6400f7..338eed4a7340 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java @@ -20,6 +20,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.Ordered; +import org.springframework.core.annotation.Order; import org.springframework.security.core.Authentication; import org.springframework.stereotype.Component; @@ -29,6 +31,7 @@ * the authenticated EPerson is allowed to perform the requested action. 
*/ @Component +@Order(value = Ordered.HIGHEST_PRECEDENCE) public class AdminRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin { private static final Logger log = LoggerFactory.getLogger(RestObjectPermissionEvaluatorPlugin.class); From bdf867541d99a61342e3723d8c47e5bc653ae1cd Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 3 Mar 2023 12:41:51 +0100 Subject: [PATCH 053/686] [DSC-968] Adding pagination on bitstream cleanup --- .../dspace/content/BitstreamServiceImpl.java | 4 +- .../org/dspace/content/dao/BitstreamDAO.java | 2 +- .../content/dao/impl/BitstreamDAOImpl.java | 5 +- .../content/service/BitstreamService.java | 2 +- .../bitstore/BitstreamStorageServiceImpl.java | 134 ++++++++++-------- 5 files changed, 82 insertions(+), 65 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 071bf3972fcb..cc89cea33a25 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -332,8 +332,8 @@ public void updateLastModified(Context context, Bitstream bitstream) { } @Override - public List findDeletedBitstreams(Context context) throws SQLException { - return bitstreamDAO.findDeletedBitstreams(context); + public List findDeletedBitstreams(Context context, int limit, int offset) throws SQLException { + return bitstreamDAO.findDeletedBitstreams(context, limit, offset); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java index c1ef92313127..0d7afaa3cd73 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java @@ -29,7 +29,7 @@ public interface BitstreamDAO extends DSpaceObjectLegacySupportDAO { public Iterator findAll(Context context, int limit, int offset) throws SQLException; - public List findDeletedBitstreams(Context context) throws SQLException; + public List findDeletedBitstreams(Context context, int limit, int offset) throws SQLException; public List findDuplicateInternalIdentifier(Context context, Bitstream bitstream) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java index 02e3509c311a..d6d77fe7f0c7 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java @@ -41,13 +41,14 @@ protected BitstreamDAOImpl() { } @Override - public List findDeletedBitstreams(Context context) throws SQLException { + public List findDeletedBitstreams(Context context, int limit, int offset) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Bitstream.class); Root bitstreamRoot = criteriaQuery.from(Bitstream.class); criteriaQuery.select(bitstreamRoot); + criteriaQuery.orderBy(criteriaBuilder.desc(bitstreamRoot.get(Bitstream_.ID))); criteriaQuery.where(criteriaBuilder.equal(bitstreamRoot.get(Bitstream_.deleted), true)); - return list(context, criteriaQuery, false, Bitstream.class, -1, -1); + return list(context, criteriaQuery, false, Bitstream.class, limit, offset); } diff --git 
a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java index 4621c95e7c89..8effabf28435 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java @@ -183,7 +183,7 @@ public InputStream retrieve(Context context, Bitstream bitstream) * @return a list of all bitstreams that have been "deleted" * @throws SQLException if database error */ - public List findDeletedBitstreams(Context context) throws SQLException; + public List findDeletedBitstreams(Context context, int limit, int offset) throws SQLException; /** diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java index b8a1a2e96ad4..977b5b7b32b6 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java @@ -17,6 +17,7 @@ import java.util.UUID; import javax.annotation.Nullable; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections4.MapUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -224,25 +225,62 @@ public InputStream retrieve(Context context, Bitstream bitstream) @Override public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLException, IOException, AuthorizeException { Context context = new Context(Context.Mode.BATCH_EDIT); - int commitCounter = 0; + + int offset = 0; + int limit = 100; + + int cleanedBitstreamCount = 0; + + int deletedBitstreamCount = bitstreamService.countDeletedBitstreams(context); + System.out.println("Found " + deletedBitstreamCount + " deleted bitstreams to clean up"); try { context.turnOffAuthorisationSystem(); - List storage = bitstreamService.findDeletedBitstreams(context); - for (Bitstream bitstream : storage) { - UUID bid = bitstream.getID(); - Map wantedMetadata = new HashMap(); - wantedMetadata.put("size_bytes", null); - wantedMetadata.put("modified", null); - Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); + while (cleanedBitstreamCount < deletedBitstreamCount) { + List storage = bitstreamService.findDeletedBitstreams(context, limit, offset); + + if (CollectionUtils.isEmpty(storage)) { + break; + } + + for (Bitstream bitstream : storage) { + UUID bid = bitstream.getID(); + Map wantedMetadata = new HashMap(); + wantedMetadata.put("size_bytes", null); + wantedMetadata.put("modified", null); + Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); + + + // Make sure entries which do not exist are removed + if (MapUtils.isEmpty(receivedMetadata)) { + log.debug("bitstore.about is empty, so file is not present"); + if (deleteDbRecords) { + log.debug("deleting record"); + if (verbose) { + System.out.println(" - Deleting bitstream information (ID: " + bid + ")"); + } + checksumHistoryService.deleteByBitstream(context, bitstream); + if (verbose) { + System.out.println(" - Deleting bitstream record from database (ID: " + bid + ")"); + } + bitstreamService.expunge(context, bitstream); + } + context.uncacheEntity(bitstream); + continue; + } + + // This is a small chance that this is a file which is + // being stored -- get it next time.
+ if (isRecent(Long.valueOf(receivedMetadata.get("modified").toString()))) { + log.debug("file is recent"); + context.uncacheEntity(bitstream); + continue; + } - // Make sure entries which do not exist are removed - if (MapUtils.isEmpty(receivedMetadata)) { - log.debug("bitstore.about is empty, so file is not present"); if (deleteDbRecords) { - log.debug("deleting record"); + log.debug("deleting db record"); if (verbose) { System.out.println(" - Deleting bitstream information (ID: " + bid + ")"); } @@ -252,64 +290,42 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio } bitstreamService.expunge(context, bitstream); } - context.uncacheEntity(bitstream); - continue; - } - - // This is a small chance that this is a file which is - // being stored -- get it next time. - if (isRecent(Long.valueOf(receivedMetadata.get("modified").toString()))) { - log.debug("file is recent"); - context.uncacheEntity(bitstream); - continue; - } - if (deleteDbRecords) { - log.debug("deleting db record"); - if (verbose) { - System.out.println(" - Deleting bitstream information (ID: " + bid + ")"); + if (isRegisteredBitstream(bitstream.getInternalId())) { + context.uncacheEntity(bitstream); + continue; // do not delete registered bitstreams } - checksumHistoryService.deleteByBitstream(context, bitstream); - if (verbose) { - System.out.println(" - Deleting bitstream record from database (ID: " + bid + ")"); + + + // Since versioning allows for multiple bitstreams, check if the internal + // identifier isn't used on + // another place + if (bitstreamService.findDuplicateInternalIdentifier(context, bitstream).isEmpty()) { + this.getStore(bitstream.getStoreNumber()).remove(bitstream); + + String message = ("Deleted bitstreamID " + bid + ", internalID " + bitstream.getInternalId()); + if (log.isDebugEnabled()) { + log.debug(message); + } + if (verbose) { + System.out.println(message); + } } - bitstreamService.expunge(context, bitstream); - } - if (isRegisteredBitstream(bitstream.getInternalId())) { context.uncacheEntity(bitstream); - continue; // do not delete registered bitstreams } + // Commit actual changes to DB after dispatch events + System.out.print("Performing incremental commit to the database..."); + context.commit(); + System.out.println(" Incremental commit done!"); - // Since versioning allows for multiple bitstreams, check if the internal identifier isn't used on - // another place - if (bitstreamService.findDuplicateInternalIdentifier(context, bitstream).isEmpty()) { - this.getStore(bitstream.getStoreNumber()).remove(bitstream); - - String message = ("Deleted bitstreamID " + bid + ", internalID " + bitstream.getInternalId()); - if (log.isDebugEnabled()) { - log.debug(message); - } - if (verbose) { - System.out.println(message); - } - } + cleanedBitstreamCount = cleanedBitstreamCount + storage.size(); - // Make sure to commit our outstanding work every 100 - // iterations. Otherwise you risk losing the entire transaction - // if we hit an exception, which isn't useful at all for large - // amounts of bitstreams. 
- commitCounter++; - if (commitCounter % 100 == 0) { - context.dispatchEvents(); - // Commit actual changes to DB after dispatch events - System.out.print("Performing incremental commit to the database..."); - context.commit(); - System.out.println(" Incremental commit done!"); + if (!deleteDbRecords) { + offset = offset + limit; } - context.uncacheEntity(bitstream); } System.out.print("Committing changes to the database..."); From f82834bd574feeffd00070571198957d5d3f5358 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Mon, 13 Mar 2023 14:36:57 +0000 Subject: [PATCH 054/686] ItemOwningCollectionUpdateRestController: add support for inheritPolicies flag when moving items --- ...mOwningCollectionUpdateRestController.java | 24 ++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java index b06360ee1dc2..1a924f7e748e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java @@ -40,6 +40,7 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.bind.annotation.RequestParam; /** * This controller will handle all the incoming calls on the api/code/items/{uuid}/owningCollection endpoint @@ -69,6 +70,7 @@ public class ItemOwningCollectionUpdateRestController { * moving the item to the new collection. * * @param uuid The UUID of the item that will be moved + * @param inheritCollectionPolicies Boolean flag whether to inherit the target collection policies when moving the item * @param response The response object * @param request The request object * @return The wrapped resource containing the new owning collection or null when the item was not moved @@ -79,7 +81,9 @@ public class ItemOwningCollectionUpdateRestController { @RequestMapping(method = RequestMethod.PUT, consumes = {"text/uri-list"}) @PreAuthorize("hasPermission(#uuid, 'ITEM','WRITE')") @PostAuthorize("returnObject != null") - public CollectionRest move(@PathVariable UUID uuid, HttpServletResponse response, + public CollectionRest move(@PathVariable UUID uuid, + @RequestParam(name = "inheritPolicies", defaultValue = "false") Boolean inheritCollectionPolicies, + HttpServletResponse response, HttpServletRequest request) throws SQLException, IOException, AuthorizeException { Context context = ContextUtil.obtainContext(request); @@ -91,7 +95,7 @@ public CollectionRest move(@PathVariable UUID uuid, HttpServletResponse response "or the data cannot be resolved to a collection."); } - Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0)); + Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0), inheritCollectionPolicies); if (targetCollection == null) { return null; @@ -107,17 +111,19 @@ public CollectionRest move(@PathVariable UUID uuid, HttpServletResponse response * @param item The item to be moved * @param currentCollection The current owning collection of the item * @param targetCollection The target collection of the item + * @param inheritPolicies Boolean flag whether to inherit the target collection policies when
moving the item * @return The target collection * @throws SQLException If something goes wrong * @throws IOException If something goes wrong * @throws AuthorizeException If the user is not authorized to perform the move action */ private Collection moveItem(final Context context, final Item item, final Collection currentCollection, - final Collection targetCollection) + final Collection targetCollection, + final boolean inheritPolicies) throws SQLException, IOException, AuthorizeException { - itemService.move(context, item, currentCollection, targetCollection); - //Necessary because Controller does not pass through general RestResourceController, and as such does not do its - // commit in DSpaceRestRepository.createAndReturn() or similar + itemService.move(context, item, currentCollection, targetCollection, inheritPolicies); + // Necessary because Controller does not pass through general RestResourceController, and as such does not do its + // commit in DSpaceRestRepository.createAndReturn() or similar context.commit(); return context.reloadEntity(targetCollection); @@ -129,12 +135,14 @@ private Collection moveItem(final Context context, final Item item, final Collec * @param context The context Object * @param itemUuid The uuid of the item to be moved * @param targetCollection The target collection + * @param inheritPolicies Whether to inherit the target collection policies when moving the item * @return The new owning collection of the item when authorized or null when not authorized * @throws SQLException If something goes wrong * @throws IOException If something goes wrong * @throws AuthorizeException If the user is not authorized to perform the move action */ - private Collection performItemMove(final Context context, final UUID itemUuid, final Collection targetCollection) + private Collection performItemMove(final Context context, final UUID itemUuid, final Collection targetCollection, + boolean inheritPolicies) throws SQLException, IOException, AuthorizeException { Item item = itemService.find(context, itemUuid); @@ -153,7 +161,7 @@ private Collection performItemMove(final Context context, final UUID itemUuid, f if (authorizeService.authorizeActionBoolean(context, currentCollection, Constants.ADMIN)) { - return moveItem(context, item, currentCollection, targetCollection); + return moveItem(context, item, currentCollection, targetCollection, inheritPolicies); } return null; From 11d2d5a3c72bb6122973263abed94fef655ea9fa Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Mon, 13 Mar 2023 15:07:04 +0000 Subject: [PATCH 055/686] Fix Checkstyle issues --- .../ItemOwningCollectionUpdateRestController.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java index 1a924f7e748e..b5a0c957f265 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java @@ -39,8 +39,8 @@ import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RestController; import org.springframework.web.bind.annotation.RequestParam; +import 
org.springframework.web.bind.annotation.RestController; /** * This controller will handle all the incoming calls on the api/code/items/{uuid}/owningCollection endpoint @@ -70,7 +70,8 @@ public class ItemOwningCollectionUpdateRestController { * moving the item to the new collection. * * @param uuid The UUID of the item that will be moved - * @param inheritCollectionPolicies Boolean flag whether to inherit the target collection policies when moving the item + * @param inheritCollectionPolicies Boolean flag whether to inherit the target collection policies when + * moving the item * @param response The response object * @param request The request object * @return The wrapped resource containing the new owning collection or null when the item was not moved @@ -82,7 +83,8 @@ public class ItemOwningCollectionUpdateRestController { @PreAuthorize("hasPermission(#uuid, 'ITEM','WRITE')") @PostAuthorize("returnObject != null") public CollectionRest move(@PathVariable UUID uuid, - @RequestParam(name = "inheritPolicies", defaultValue = "false") Boolean inheritCollectionPolicies, + @RequestParam(name = "inheritPolicies", defaultValue = "false") + Boolean inheritCollectionPolicies, HttpServletResponse response, HttpServletRequest request) throws SQLException, IOException, AuthorizeException { @@ -95,7 +97,8 @@ public CollectionRest move(@PathVariable UUID uuid, "or the data cannot be resolved to a collection."); } - Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0), inheritCollectionPolicies); + Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0), + inheritCollectionPolicies); if (targetCollection == null) { return null; @@ -122,8 +125,8 @@ private Collection moveItem(final Context context, final Item item, final Collec final boolean inheritPolicies) throws SQLException, IOException, AuthorizeException { itemService.move(context, item, currentCollection, targetCollection, inheritPolicies); - // Necessary because Controller does not pass through general RestResourceController, and as such does not do its - // commit in DSpaceRestRepository.createAndReturn() or similar + // Necessary because Controller does not pass through general RestResourceController, and as such does not do + // its commit in DSpaceRestRepository.createAndReturn() or similar context.commit(); return context.reloadEntity(targetCollection); From 4436549f0b33e90de3069d38a5a5272d889db3fe Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Mon, 13 Mar 2023 11:46:20 +0100 Subject: [PATCH 056/686] [DSC-963] Minor improvements --- dspace-server-webapp/pom.xml | 31 +++++++------------ .../org/dspace/app/rest/WebApplication.java | 10 ++---- .../app/{rest => }/TestApplication.java | 11 +++++-- .../AbstractControllerIntegrationTest.java | 2 +- .../AbstractWebClientIntegrationTest.java | 2 +- .../main/java/org/dspace/app/Application.java | 13 ++++++++ dspace/config/log4j2-console.xml | 2 +- 7 files changed, 39 insertions(+), 32 deletions(-) rename dspace-server-webapp/src/test/java/org/dspace/app/{rest => }/TestApplication.java (55%) diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index fa607629e7d8..c884ea7d57c1 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -28,6 +28,18 @@ + + org.apache.maven.plugins + maven-jar-plugin + + + + + test-jar + + + + - + From 5cd9476fb812c56e0fe44b04d82ec1e73c857bbf Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Mon, 13 Mar 2023 17:44:07 +0100 Subject: [PATCH 057/686] 
[DSC-963] Fixed ItemRestRepositoryIT and GenericAuthorizationFeatureIT integration tests --- .../ExternalSourceItemUriListHandler.java | 8 +++++--- .../GenericAuthorizationFeatureIT.java | 18 ++++++++++++------ 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java index d619100bf67a..201a7ba1633d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java @@ -30,16 +30,19 @@ @Component public class ExternalSourceItemUriListHandler extends ExternalSourceEntryItemUriListHandler { + private Pattern pattern = Pattern.compile("\\/api\\/core\\/items\\/(.*)"); + @Autowired private ItemService itemService; @Override @SuppressWarnings("rawtypes") public boolean supports(List uriList, String method,Class clazz) { - if (clazz != Item.class) { + if (clazz != Item.class || uriList.size() != 1) { return false; } - return true; + + return pattern.matcher(uriList.get(0)).find(); } @Override @@ -61,7 +64,6 @@ public boolean validate(Context context, HttpServletRequest request, List uriList) { Item item = null; String url = uriList.get(0); - Pattern pattern = Pattern.compile("\\/api\\/core\\/items\\/(.*)"); Matcher matcher = pattern.matcher(url); if (!matcher.find()) { throw new DSpaceBadRequestException("The uri: " + url + " doesn't resolve to an item"); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java index 1d3b5b051605..e6ccf5954c7a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java @@ -757,7 +757,8 @@ public void testCanMoveAdmin() throws Exception { // Verify the general admin has this feature on item 1 getClient(adminToken).perform( get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + + "http://localhost/api/core/items/" + item1.getID()) + .param("size", "1000")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -765,7 +766,8 @@ public void testCanMoveAdmin() throws Exception { // Verify community A admin has this feature on item 1 getClient(communityAAdminToken).perform( get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + + "http://localhost/api/core/items/" + item1.getID()) + .param("size", "1000")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -773,7 +775,8 @@ public void testCanMoveAdmin() throws Exception { // Verify collection X admin has this feature on item 1 getClient(collectionXAdminToken).perform( get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + + "http://localhost/api/core/items/" + item1.getID()) + .param("size", "1000")) 
.andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -781,7 +784,8 @@ public void testCanMoveAdmin() throws Exception { // Verify item 1 admin doesn’t have this feature on item 1 getClient(item1AdminToken).perform( get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + + "http://localhost/api/core/items/" + item1.getID()) + .param("size", "1000")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -789,7 +793,8 @@ public void testCanMoveAdmin() throws Exception { // Verify community A admin doesn’t have this feature on item 2 getClient(communityAAdminToken).perform( get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + + "http://localhost/api/core/items/" + item2.getID()) + .param("size", "1000")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -808,7 +813,8 @@ public void testCanMoveAdmin() throws Exception { // verify item 1 write has this feature on item 1 getClient(item1WriterToken).perform( get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + + "http://localhost/api/core/items/" + item1.getID()) + .param("size", "1000")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]") .exists()); From 4c303770d56ef863155d03c74c54267324f82172 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Tue, 14 Mar 2023 11:31:12 +0100 Subject: [PATCH 058/686] [DSC-963] Added size parameter on GenericAuthorizationFeatureIT tests --- .../GenericAuthorizationFeatureIT.java | 340 ++++++++++-------- 1 file changed, 188 insertions(+), 152 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java index e6ccf5954c7a..d59ef00018b1 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java @@ -209,7 +209,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { String siteId = ContentServiceFactory.getInstance().getSiteService().findSite(context).getID().toString(); // Verify the general admin has this feature on the site - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/sites/" + siteId)) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -217,14 +217,14 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify community A admin doesn’t have this feature on the site getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/sites/" + siteId)) .andExpect(status().isOk()) 
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on community A - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -232,7 +232,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify community A admin has this feature on community A getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -240,7 +240,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify community A admin has this feature on community AA getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -248,7 +248,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify collection X admin doesn’t have this feature on community A getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -256,7 +256,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify community A admin doesn’t have this feature on community B getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityB.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -264,7 +264,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify the general admin has this feature on collection X getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -272,7 +272,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify community A admin has this feature on collection X getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) 
.andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -280,7 +280,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify collection X admin has this feature on collection X getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -288,7 +288,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify item 1 admin doesn’t have this feature on collection X getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -296,7 +296,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify collection X admin doesn’t have this feature on collection Y getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionY.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -304,7 +304,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify the general admin has this feature on item 1 getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -312,7 +312,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify community A admin has this feature on item 1 getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -320,7 +320,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify collection X admin has this feature on item 1 getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -328,7 +328,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify item 1 admin has this feature on item 1 getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) 
.andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -336,7 +336,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify item 1 admin doesn’t have this feature on item 2 getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -344,7 +344,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify the general admin has this feature on the bundle in item 1 getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -352,7 +352,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify community A admin has this feature on the bundle in item 1 getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -360,7 +360,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify collection X admin has this feature on the bundle in item 1 getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -368,7 +368,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify item 1 admin has this feature on the bundle in item 1 getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -376,7 +376,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify item 1 admin doesn’t have this feature on the bundle in item 2 getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -384,7 +384,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify the general admin has this feature on the bitstream in item 1 getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + 
"http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -392,7 +392,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify community A admin has this feature on the bitstream in item 1 getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -400,7 +400,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify collection X admin has this feature on the bitstream in item 1 getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -408,7 +408,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify item 1 admin has this feature on the bitstream in item 1 getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -416,7 +416,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { // Verify item 1 admin doesn’t have this feature on the bitstream in item 2 getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -431,7 +431,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception { // Verify the general admin has this feature on item 1 getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -439,7 +439,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception { // Verify community A admin has this feature on item 1 getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -447,7 +447,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception { // Verify collection X admin has this feature on item 1 getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + 
get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -455,7 +455,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception { // Verify item 1 admin has this feature on item 1 getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -463,7 +463,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception { // Verify community A admin doesn’t have this feature on item 2 getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -480,14 +480,14 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // (or doesn’t have access otherwise) if (hasDSOAccess) { getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -496,7 +496,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify community A write doesn’t have this feature on community AA getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -504,7 +504,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify community A write doesn’t have this feature on collection X getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -512,7 +512,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify community A write doesn’t have this feature on item 1 getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + 
"http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -520,7 +520,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify community A write doesn’t have this feature on the bundle in item 1 getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -528,7 +528,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify community A write doesn’t have this feature on the bitstream in item 1 getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -536,7 +536,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify collection X write doesn’t have this feature on community A getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -544,7 +544,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify collection X write doesn’t have this feature on community AA getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -554,14 +554,14 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // (or doesn’t have access otherwise) if (hasDSOAccess) { getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -570,7 +570,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify collection X write doesn’t have this feature on item 1 getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + 
"http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -578,7 +578,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify collection X write doesn’t have this feature on the bundle in item 1 getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -586,7 +586,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify collection X write doesn’t have this feature on the bitstream in item 1 getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -594,7 +594,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify item 1 write doesn’t have this feature on community A getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -602,7 +602,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify item 1 write doesn’t have this feature on community AA getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -610,7 +610,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify item 1 write doesn’t have this feature on collection X getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -620,14 +620,14 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // (or doesn’t have access otherwise) if (hasDSOAccess) { getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + 
item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -636,7 +636,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify item 1 write doesn’t have this feature on the bundle in item 1 getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -644,7 +644,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify item 1 write doesn’t have this feature on the bitstream in item 1 getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -652,7 +652,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify community A write doesn’t have this feature on community B getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityB.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -660,7 +660,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify collection X write doesn’t have this feature on collection Y getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionY.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -668,7 +668,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify item 1 write doesn’t have this feature on item 2 getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -682,7 +682,7 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc // Verify community A write doesn’t have this feature on item 1 getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -690,7 +690,7 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc // Verify collection X write doesn’t have this feature on item 1 getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + 
get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -700,14 +700,14 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc // (or doesn’t have access otherwise) if (hasDSOAccess) { getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -716,7 +716,7 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc // Verify item 1 write doesn’t have this feature on item 2 getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -756,45 +756,40 @@ public void testCanMoveAdmin() throws Exception { // Verify the general admin has this feature on item 1 getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID()) - .param("size", "1000")) + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on item 1 getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID()) - .param("size", "1000")) + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID()) - .param("size", "1000")) + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on item 1 getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID()) - .param("size", "1000")) + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + + 
"http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A admin doesn’t have this feature on item 2 getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID()) - .param("size", "1000")) + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + + "http://localhost/api/core/items/" + item2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -812,9 +807,8 @@ public void testCanMoveAdmin() throws Exception { // verify item 1 write has this feature on item 1 getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID()) - .param("size", "1000")) + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]") .exists()); @@ -836,7 +830,7 @@ public void testCanMoveWriter() throws Exception { String item1WriterToken = getAuthToken(item1Writer.getEmail(), password); // verify item 1 write has this feature on item 1 getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]") @@ -873,28 +867,30 @@ public void testCanDeleteAdmin() throws Exception { final String feature = "canDelete"; // Verify the general admin doesn’t have this feature on the site - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/sites/" + siteId)) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on community A - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community A - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community AA - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + 
getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -914,161 +910,173 @@ public void testCanDeleteAdmin() throws Exception { .build(); context.restoreAuthSystemState(); String communityAAAdminToken = getAuthToken(communityAAAdmin.getEmail(), password); - getClient(communityAAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X admin doesn’t have this feature on community A - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A admin doesn’t have this feature on community B - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityB.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on collection X - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on collection X - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin doesn’t have this feature on collection X - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 admin doesn’t have this feature on collection X - 
getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X admin doesn’t have this feature on collection Y - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionY.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 admin doesn’t have this feature on item 2 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + 
"http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bundle in item 2 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bitstream in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bitstream in item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bitstream in item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) 
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bitstream in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bitstream in item 2 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1096,7 +1104,8 @@ public void testCanDeleteAdminParent() throws Exception { context.restoreAuthSystemState(); String communityAAAdminToken = getAuthToken(communityAAAdmin.getEmail(), password); //verify the community AA admin has this feature on community AA - getClient(communityAAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1111,7 +1120,8 @@ public void testCanDeleteAdminParent() throws Exception { .build(); context.restoreAuthSystemState(); // verify collection X admin has this feature on collection X - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1126,7 +1136,7 @@ public void testCanDeleteAdminParent() throws Exception { .build(); context.restoreAuthSystemState(); // verify item 1 admin has this feature on item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1157,13 +1167,15 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String communityADeleterToken = getAuthToken(communityADeleter.getEmail(), password); // Verify the user has this feature on community A - getClient(communityADeleterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityADeleterToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this 
user doesn’t have this feature on community AA - getClient(communityADeleterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityADeleterToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1185,19 +1197,22 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String communityARemoverToken = getAuthToken(communityARemover.getEmail(), password); // Verify the user has this feature on community AA - getClient(communityARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityARemoverToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on community A - getClient(communityARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityARemoverToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on collection X - getClient(communityARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityARemoverToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1218,19 +1233,22 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String communityAARemoverToken = getAuthToken(communityAARemover.getEmail(), password); // Verify the user has this feature on collection X - getClient(communityAARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAARemoverToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on community AA - getClient(communityAARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAARemoverToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/communities/" + communityAA.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on item 1 - getClient(communityAARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAARemoverToken) + 
.perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1251,7 +1269,8 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String collectionXRemoverToken = getAuthToken(collectionXRemover.getEmail(), password); // Verify the user doesn’t have this feature on item 1 - getClient(collectionXRemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXRemoverToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1272,7 +1291,7 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String item1DeleterToken = getAuthToken(item1Deleter.getEmail(), password); // Verify the user doesn’t have this feature on item 1 - getClient(item1DeleterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1DeleterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1299,21 +1318,21 @@ public void testCanDeleteMinimalPermissions() throws Exception { String collectionXRemoverItem1DeleterToken = getAuthToken(collectionXRemoverItem1Deleter.getEmail(), password); // Verify the user has this feature on item 1 getClient(collectionXRemoverItem1DeleterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on collection X getClient(collectionXRemoverItem1DeleterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/collections/" + collectionX.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on the bundle in item 1 getClient(collectionXRemoverItem1DeleterToken).perform( - get("/api/authz/authorizations/search/object?embed=feature&uri=" + get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1334,19 +1353,19 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String item1RemoverToken = getAuthToken(item1Remover.getEmail(), password); // Verify the user has this feature on the bundle in item 1 - getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + 
"http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on item 1 - getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on the bitstream in item 1 - getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1367,7 +1386,8 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String bundle1RemoverToken = getAuthToken(bundle1Remover.getEmail(), password); // Verify the user doesn’t have this feature on the bitstream in item 1 - getClient(bundle1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(bundle1RemoverToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1394,7 +1414,8 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String bundle1item1RemoverToken = getAuthToken(bundle1item1Remover.getEmail(), password); // Verify the user has this feature on the bitstream in item 1 - getClient(bundle1item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(bundle1item1RemoverToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1410,35 +1431,38 @@ public void testCanReorderBitstreamsAdmin() throws Exception { final String feature = "canReorderBitstreams"; // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + 
"')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on the bundle in item 2 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1453,19 +1477,21 @@ public void testCanReorderBitstreamsWriter() throws Exception { final String feature = "canReorderBitstreams"; // Verify community A write doesn’t have this feature on the bundle in item 1 - getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAWriterToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on the bundle in item 1 - getClient(collectionXWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXWriterToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on the bundle in item 1 - getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1473,7 +1499,8 @@ public void testCanReorderBitstreamsWriter() throws Exception { // Create a new user, grant WRITE permissions on the bundle in item 1 to this user // Verify the user has this feature on the bundle in item 1 - getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAWriterToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" 
+ bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1489,35 +1516,38 @@ public void testCanCreateBitstreamAdmin() throws Exception { final String feature = "canCreateBitstream"; // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on the bundle in item 2 - getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAAdminToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle2.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1532,21 +1562,23 @@ public void testCanCreateBitstreamWriter() throws Exception { final String feature = "canCreateBitstream"; // Verify community A write doesn’t have this feature on the bundle in item 1 - getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAWriterToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on the bundle in item 1 - getClient(collectionXWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXWriterToken) + 
.perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on the bundle in item 1 - getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1567,7 +1599,7 @@ public void testCanCreateBitstreamWriter() throws Exception { context.restoreAuthSystemState(); String bundle1WriterToken = getAuthToken(bundle1Writer.getEmail(), password); // Verify the user doesn’t have this feature on the bundle in item 1 - getClient(bundle1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(bundle1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1588,7 +1620,7 @@ public void testCanCreateBitstreamWriter() throws Exception { context.restoreAuthSystemState(); String bundle1AdderToken = getAuthToken(bundle1Adder.getEmail(), password); // Verify the user doesn’t have this feature on the bundle in item 1 - getClient(bundle1AdderToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(bundle1AdderToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1625,7 +1657,8 @@ public void testCanCreateBitstreamWriter() throws Exception { context.restoreAuthSystemState(); String bundle1WriterAdderToken = getAuthToken(bundle1WriterAdder.getEmail(), password); // Verify the user has this feature on the bundle in item 1 - getClient(bundle1WriterAdderToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(bundle1WriterAdderToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/bundles/" + bundle1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1645,21 +1678,23 @@ public void testCanCreateBundleWriter() throws Exception { final String feature = "canCreateBundle"; // Verify community A write doesn’t have this feature on item 1 - getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(communityAWriterToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on item 1 - getClient(collectionXWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(collectionXWriterToken) + 
.perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on item 1 - getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" @@ -1685,7 +1720,8 @@ public void testCanCreateBundleWriter() throws Exception { context.restoreAuthSystemState(); String item1AdderWriterToken = getAuthToken(item1AdderWriter.getEmail(), password); // Verify the user has this feature on item 1 - getClient(item1AdderWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri=" + getClient(item1AdderWriterToken) + .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" + "http://localhost/api/core/items/" + item1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" From 76fdd16a9b3d374c3fe7f47f8e732bd9fd57025b Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Tue, 14 Mar 2023 12:57:50 +0100 Subject: [PATCH 059/686] [DSC-963] Fixed SubmissionCCLicenseUrlRepositoryIT tests --- .../org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java index c306691eb352..30404e030ab6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java @@ -21,6 +21,8 @@ import org.dspace.app.rest.model.hateoas.DSpaceResource; import org.dspace.app.rest.utils.Utils; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.Ordered; +import org.springframework.core.annotation.Order; import org.springframework.data.domain.Pageable; import org.springframework.hateoas.IanaLinkRelations; import org.springframework.hateoas.Link; @@ -33,6 +35,7 @@ * @author Tom Desair (tom dot desair at atmire dot com) */ @Component +@Order(Ordered.HIGHEST_PRECEDENCE) public class DSpaceResourceHalLinkFactory extends HalLinkFactory { @Autowired From fd955c49884073474842fb75d4ced2761207f83e Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Tue, 14 Mar 2023 17:12:09 +0100 Subject: [PATCH 060/686] [DSC-963] Refactoring to maintain server module --- dspace-server-webapp/pom.xml | 17 + .../src/main/resources/static/index.html | 0 .../resources/static/js/hal/http/client.js | 0 .../static/js/vendor/CustomPostForm.js | 0 .../src/main/resources/static/login.html | 0 .../src/main/resources/static/styles.css | 0 dspace/modules/pom.xml | 11 + .../modules/server-boot}/pom.xml | 28 +- .../org/dspace/app/ServerBootApplication.java | 36 ++ dspace/modules/server/pom.xml | 349 ++++++++++++++++++ .../org/dspace/app/ServerApplication.java | 10 +- .../modules/server/src/main/webapp/.gitignore | 0 .../app/rest/example/ExampleController.java | 0 
.../app/rest/example/ExampleControllerIT.java | 0 dspace/pom.xml | 6 - pom.xml | 21 -- 16 files changed, 419 insertions(+), 59 deletions(-) rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/index.html (100%) rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/js/hal/http/client.js (100%) rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/js/vendor/CustomPostForm.js (100%) rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/login.html (100%) rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/styles.css (100%) rename {dspace-webapp-boot => dspace/modules/server-boot}/pom.xml (73%) create mode 100644 dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java create mode 100644 dspace/modules/server/pom.xml rename dspace-webapp-boot/src/main/java/org/dspace/app/Application.java => dspace/modules/server/src/main/java/org/dspace/app/ServerApplication.java (85%) create mode 100644 dspace/modules/server/src/main/webapp/.gitignore rename {dspace-webapp-boot => dspace/modules/server}/src/test/java/org/dspace/app/rest/example/ExampleController.java (100%) rename {dspace-webapp-boot => dspace/modules/server}/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java (100%) diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index c884ea7d57c1..0d3458199d37 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -40,6 +40,23 @@ + + + com.mycila + license-maven-plugin + + + **/src/test/resources/** + **/src/test/data/** + + src/main/resources/static/index.html + src/main/resources/static/login.html + src/main/resources/static/styles.css + src/main/resources/static/js/hal/** + src/main/resources/static/js/vendor/** + + + + modules org.dspace - dspace-parent cris-2022.03.01-SNAPSHOT .. - ${basedir}/.. - - @ + ${basedir}/../../.. @@ -108,23 +105,6 @@ - - - com.mycila - license-maven-plugin - - - **/src/test/resources/** - **/src/test/data/** - - src/main/resources/static/index.html - src/main/resources/static/login.html - src/main/resources/static/styles.css - src/main/resources/static/js/hal/** - src/main/resources/static/js/vendor/** - - - org.springframework.boot spring-boot-maven-plugin diff --git a/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java new file mode 100644 index 000000000000..f46532ff14b5 --- /dev/null +++ b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app; + +import org.dspace.app.rest.WebApplication; +import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; +import org.dspace.app.rest.utils.DSpaceKernelInitializer; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.builder.SpringApplicationBuilder; + +/** + * Define the Spring Boot Application settings itself to be run using an + * embedded application server. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@SpringBootApplication(scanBasePackageClasses = WebApplication.class) +public class ServerBootApplication { + + private ServerBootApplication() { + + } + + public static void main(String[] args) { + new SpringApplicationBuilder(ServerBootApplication.class) + .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer()) + .run(args); + } + +} diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml new file mode 100644 index 000000000000..65849295e8c4 --- /dev/null +++ b/dspace/modules/server/pom.xml @@ -0,0 +1,349 @@ + + 4.0.0 + org.dspace.modules + server + war + DSpace Server Webapp:: Local Customizations + + modules + org.dspace + cris-2022.03.01-SNAPSHOT + .. + + + + + ${basedir}/../../.. + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + unpack + prepare-package + + unpack-dependencies + + + org.dspace.modules + additions + + ${project.build.directory}/additions + META-INF/** + + + + + + org.apache.maven.plugins + maven-war-plugin + + false + + true + + + + ${project.build.directory}/additions + WEB-INF/classes + + + + + + prepare-package + + + + + + org.codehaus.gmaven + groovy-maven-plugin + + + setproperty + initialize + + execute + + + + project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/'); + log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']); + + + + + + + + + + + + unit-test-environment + + false + + skipUnitTests + false + + + + + + + maven-dependency-plugin + + ${project.build.directory}/testing + + + org.dspace + dspace-parent + ${project.version} + zip + testEnvironment + + + + + + setupUnitTestEnvironment + generate-test-resources + + unpack + + + + + + + + maven-surefire-plugin + + + + + + ${agnostic.build.dir}/testing/dspace + + true + ${agnostic.build.dir}/testing/dspace/solr/ + + + + + + + + + org.dspace + dspace-server-webapp + test-jar + test + + + + + + + integration-test-environment + + false + + skipIntegrationTests + false + + + + + + + maven-dependency-plugin + + ${project.build.directory}/testing + + + org.dspace + dspace-parent + ${project.version} + zip + testEnvironment + + + + + + setupIntegrationTestEnvironment + pre-integration-test + + unpack + + + + + + + + maven-failsafe-plugin + + + + + ${agnostic.build.dir}/testing/dspace + + true + ${agnostic.build.dir}/testing/dspace/solr/ + + + + + + + + + org.dspace + dspace-server-webapp + test-jar + test + + + + + + oracle-support + + + db.name + oracle + + + + + com.oracle + ojdbc6 + + + + + + + + + org.dspace.modules + additions + + + org.dspace + dspace-server-webapp + + + org.springframework.boot + spring-boot-starter-tomcat + provided + ${spring-boot.version} + + + org.apache.solr + solr-solrj + ${solr.client.version} + + + + + org.dspace + dspace-api + test-jar + test + + + org.dspace + dspace-server-webapp + test-jar + test + + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework.security + spring-security-test + ${spring-security.version} + test + + + com.jayway.jsonpath + json-path-assert + ${json-path.version} + test + + + junit + junit + test + + + com.h2database + h2 + test + + + org.mockito + mockito-inline + test + + + + + org.apache.solr + solr-core + ${solr.client.version} + test + + + + org.apache.commons + commons-text + + + + + org.apache.lucene + lucene-analyzers-icu + test + + + + + diff --git 
a/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java b/dspace/modules/server/src/main/java/org/dspace/app/ServerApplication.java similarity index 85% rename from dspace-webapp-boot/src/main/java/org/dspace/app/Application.java rename to dspace/modules/server/src/main/java/org/dspace/app/ServerApplication.java index dc84b29a5620..34acc778b7f3 100644 --- a/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java +++ b/dspace/modules/server/src/main/java/org/dspace/app/ServerApplication.java @@ -28,13 +28,7 @@ * */ @SpringBootApplication(scanBasePackageClasses = WebApplication.class) -public class Application extends SpringBootServletInitializer { - - public static void main(String[] args) { - new SpringApplicationBuilder(Application.class) - .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer()) - .run(args); - } +public class ServerApplication extends SpringBootServletInitializer { /** * Override the default SpringBootServletInitializer.configure() method, @@ -52,7 +46,7 @@ public static void main(String[] args) { protected SpringApplicationBuilder configure(SpringApplicationBuilder application) { // Pass this Application class, and our initializers for DSpace Kernel and Configuration // NOTE: Kernel must be initialized before Configuration - return application.sources(Application.class) + return application.sources(ServerApplication.class) .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer()); } } diff --git a/dspace/modules/server/src/main/webapp/.gitignore b/dspace/modules/server/src/main/webapp/.gitignore new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleController.java b/dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleController.java similarity index 100% rename from dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleController.java rename to dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleController.java diff --git a/dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java b/dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java similarity index 100% rename from dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java rename to dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java diff --git a/dspace/pom.xml b/dspace/pom.xml index 0dba032e688b..d5e7108fa52e 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -219,12 +219,6 @@ dspace-server-webapp compile - - org.dspace - dspace-webapp-boot - war - compile - org.dspace dspace-sword diff --git a/pom.xml b/pom.xml index e17dfbf384c0..a5d5b12f079f 100644 --- a/pom.xml +++ b/pom.xml @@ -798,21 +798,6 @@ - - - dspace-webapp-boot - - - dspace-webapp-boot/pom.xml - - - - dspace-webapp-boot - - - @@ -1167,12 +1152,6 @@ dspace-server-webapp cris-2022.03.01-SNAPSHOT - - org.dspace - dspace-webapp-boot - cris-2022.03.01-SNAPSHOT - war - org.dspace From 47fc9169179d738739cd9d0a56b51bc852a6e6b6 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Tue, 14 Mar 2023 18:36:50 +0100 Subject: [PATCH 061/686] [DSC-963] Configured spring boot maven plugin --- dspace/modules/server-boot/pom.xml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dspace/modules/server-boot/pom.xml b/dspace/modules/server-boot/pom.xml index 313cf0e78f62..a1dd702f97c5 100644 --- a/dspace/modules/server-boot/pom.xml +++ 
b/dspace/modules/server-boot/pom.xml @@ -108,6 +108,14 @@ org.springframework.boot spring-boot-maven-plugin + ${spring-boot.version} + + + + repackage + + + From 06e77f354ca4d4ca1cdbd75200e97d824954c70b Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Wed, 15 Mar 2023 17:58:16 +0100 Subject: [PATCH 062/686] [DSC-963] Suppress checkstyle warning --- .../src/main/java/org/dspace/app/ServerBootApplication.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java index f46532ff14b5..5efa79a02aca 100644 --- a/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java +++ b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java @@ -20,13 +20,10 @@ * @author Luca Giamminonni (luca.giamminonni at 4science.it) * */ +@SuppressWarnings({ "checkstyle:hideutilityclassconstructor" }) @SpringBootApplication(scanBasePackageClasses = WebApplication.class) public class ServerBootApplication { - private ServerBootApplication() { - - } - public static void main(String[] args) { new SpringApplicationBuilder(ServerBootApplication.class) .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer()) From 372b7e339461a11e57937a945e027b1c42e429db Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Fri, 17 Mar 2023 11:00:02 +0100 Subject: [PATCH 063/686] fix #8719 update sequences --- ...7.6_2023.03.17__Remove_unused_sequence.sql | 13 ++++++ ...7.6_2023.03.17__Remove_unused_sequence.sql | 13 ++++++ .../postgres/update-sequences.sql | 40 ++++++++++++++----- 3 files changed, 56 insertions(+), 10 deletions(-) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql new file mode 100644 index 000000000000..47cd157336af --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql @@ -0,0 +1,13 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5) +----------------------------------------------------------------------------------- + +DROP SEQUENCE history_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql new file mode 100644 index 000000000000..47cd157336af --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql @@ -0,0 +1,13 @@ +-- +-- The contents of this file are subject to the license and 
copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5) +----------------------------------------------------------------------------------- + +DROP SEQUENCE history_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql index 749f82382c9d..f96434f1ba8c 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql @@ -19,21 +19,41 @@ -- JVMs. The SQL code below will typically only be required after a direct -- SQL data dump from a backup or somesuch. - +SELECT setval('alert_id_seq', max(alert_id)) FROM systemwidealert; SELECT setval('bitstreamformatregistry_seq', max(bitstream_format_id)) FROM bitstreamformatregistry; +SELECT setval('checksum_history_check_id_seq', max(check_id)) FROM checksum_history; +SELECT setval('cwf_claimtask_seq', max(claimtask_id)) FROM cwf_claimtask; +SELECT setval('cwf_collectionrole_seq', max(collectionrole_id)) FROM cwf_collectionrole; +SELECT setval('cwf_in_progress_user_seq', max(in_progress_user_id)) FROM cwf_in_progress_user; +SELECT setval('cwf_pooltask_seq', max(pooltask_id)) FROM cwf_pooltask; +SELECT setval('cwf_workflowitem_seq', max(workflowitem_id)) FROM cwf_workflowitem; +SELECT setval('cwf_workflowitemrole_seq', max(workflowitemrole_id)) FROM cwf_workflowitemrole; +SELECT setval('doi_seq', max(doi_id)) FROM doi; +SELECT setval('entity_type_id_seq', max(id)) FROM entity_type; SELECT setval('fileextension_seq', max(file_extension_id)) FROM fileextension; -SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy; -SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem; -SELECT setval('registrationdata_seq', max(registrationdata_id)) FROM registrationdata; -SELECT setval('subscription_seq', max(subscription_id)) FROM subscription; -SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry; -SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue; -SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM metadataschemaregistry; +SELECT setval('handle_id_seq', max(handle_id)) FROM handle; SELECT setval('harvested_collection_seq', max(id)) FROM harvested_collection; SELECT setval('harvested_item_seq', max(id)) FROM harvested_item; -SELECT setval('webapp_seq', max(webapp_id)) FROM webapp; +SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry; +SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM metadataschemaregistry; +SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue; +SELECT setval('openurltracker_seq', max(tracker_id)) FROM openurltracker; +SELECT setval('orcid_history_id_seq', max(id)) FROM orcid_history; +SELECT setval('orcid_queue_id_seq', max(id)) FROM orcid_queue; +SELECT setval('orcid_token_id_seq', max(id)) FROM orcid_token; +SELECT setval('process_id_seq', max(process_id)) FROM process; +SELECT setval('registrationdata_seq', max(registrationdata_id)) 
FROM registrationdata; +SELECT setval('relationship_id_seq', max(id)) FROM relationship; +SELECT setval('relationship_type_id_seq', max(id)) FROM relationship_type; SELECT setval('requestitem_seq', max(requestitem_id)) FROM requestitem; -SELECT setval('handle_id_seq', max(handle_id)) FROM handle; +SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy; +SELECT setval('subscription_parameter_seq', max(subscription_id)) FROM subscription_parameter; +SELECT setval('subscription_seq', max(subscription_id)) FROM subscription; +SELECT setval('supervision_orders_seq', max(id)) FROM supervision_orders; +SELECT setval('versionhistory_seq', max(versionhistory_id)) FROM versionhistory; +SELECT setval('versionitem_seq', max(versionitem_id)) FROM versionitem; +SELECT setval('webapp_seq', max(webapp_id)) FROM webapp; +SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem; -- Handle Sequence is a special case. Since Handles minted by DSpace use the 'handle_seq', -- we need to ensure the next assigned handle will *always* be unique. So, 'handle_seq' From 5d52a2a512ca935fc76028ca1c872883d919d8fb Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 20 Mar 2023 17:11:05 +0100 Subject: [PATCH 064/686] DS-8719 - added if exist to drop sequence --- .../postgres/V7.6_2023.03.17__Remove_unused_sequence.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql index 47cd157336af..e4544e1de729 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql @@ -10,4 +10,4 @@ -- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5) ----------------------------------------------------------------------------------- -DROP SEQUENCE history_seq; \ No newline at end of file +DROP SEQUENCE IF EXISTS history_seq; \ No newline at end of file From e0846452e57113c930e42b3fcceb455636bd6d5e Mon Sep 17 00:00:00 2001 From: Michael Spalti Date: Wed, 8 Mar 2023 10:42:15 -0800 Subject: [PATCH 065/686] Added test and format registry entry Added jp2 resource --- .../config/spring/api/iiif-processing.xml | 5 ++- .../canvasdimension/CanvasDimensionsIT.java | 34 ++++++++++++++++++ .../org/dspace/iiif/canvasdimension/cat.jp2 | Bin 0 -> 72794 bytes .../config/registries/bitstream-formats.xml | 9 +++++ 4 files changed, 45 insertions(+), 3 deletions(-) create mode 100644 dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml index ea2c6546085b..fa203fe568e6 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml @@ -6,7 +6,6 @@ - - - \ No newline at end of file + diff --git a/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java index 038654af438d..7dba38c987b7 100644 --- 
a/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java +++ b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java @@ -353,6 +353,40 @@ public void processItemWithExistingMetadata() throws Exception { } + + @Test + public void processItemWithJp2File() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jp2 image to verify image server call for dimensions + InputStream input = this.getClass().getResourceAsStream("cat.jp2"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jp2") + .withMimeType("image/jp2") + .build(); + + context.restoreAuthSystemState(); + + String id = iiifItem.getID().toString(); + + execCanvasScript(id); + + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("64"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("64"))); + + } + @Test public void processParentCommunityWithMaximum() throws Exception { context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 b/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 new file mode 100644 index 0000000000000000000000000000000000000000..a6649c088643cfa44dae80c03b1fbe095880b724 GIT binary patch literal 72794 zcmeFXby!u;*Ef9V?k)i(rIAoTx}{sX8xC;*B}JqKB&55$yQI52r9m1gk$lcU@f*MU zzMuEH-s}DAy*KPVv(}nfv(}oKwPAk-005v`Do{|MA^Rf(002@`&|5o8J9Y|i9RMjU z?cNxIZwMXk;9z18eRp?p1b_oTy#oN?1m6KrDFgx7m=pjeL`(oY4;nNf^0p9Oh4ZUE zp&@j8SXhZ(#(~i^G4DFq=M5@m_PnsLjbb^VMFqL8yz`SZ?o-cCu&~oTorj;vMauov zVW|OU&%eRKBBja2v{VW&H-F(;0-~+~VPP3%pa8zGf_16wOS`bJV1p#U2AYFQ zd<%192XNT~06;MUU=R?Xzytup2x#p91%T8s!8fski6aHL#0Hl}DA)if$PK0e03s(R z3*&#A{zvd{42$*mDAp5sVU_LweCWrL008(G0HDQ&i2v>Bb73jj zdv5^%gomd0JZeZE0Oq&v+9xcnglXw+W<{OGllAv9o_pB_L=L`xxrt)uU||cAcLLct zfxt{SND~D|kg_2VXno&8$`)i|V(VlB5kTsYQpDN;LSx+bRknBoX(4O@g7krif9W)F zkTb9`Q4!Nn0keJ$_N#+}0}yDcYyxt!lQ*<90t-|)r!|}|IK50 zJCFs?))6cxfV32`1KB<}0d~^R;SZr?<|zAz5OJ^-`HQdx{YA(aT08!c$eDqh{}5s} z)}nt1a2)?qEo@|E_B#;2q6AQq5EcPj0|PNh$(Vw|80cgu2waAc_m_l&?SJrut^c!~ zu!FIRsx3(TiIO#BZvR}u*2Wb76<^8G8g$QBcw;T7^rtzpvxyN1=pbqUGJtq+Z=`~m zg5$%gfYc$4!6n3OfQ*UhFE#%fRt0GHUkRwX5h#Ie5(a`mKpShIt=aD`s1Fi|34i0GJ@Az*%*_8TKzR^=ZTr(6vT|Vd zf7t+#1ecI>@w>cVRW$c9^xuPm3xRil`}GEMe9r@?%)9&R7Z(=oo(suMkM8>u{gZ|{ zxu=otpWp}7y)IhFiUqI#Uuz!1zPIb_o+}ApF*OBCtiezCdk)lZPFh_29}XnBKmjzt z&xbag_Z-N6_|3W3c>$&Ykp1?{>VH3BzCpzQ=KiAp-^lJ`fe6w4usfjz!MhcF|Bl-u zNDTmq+ppbB45>k%gCCGyf9+_1t&=q*@jx~VqM?DK$=?zW96%M|2+k-DU>VqJL!gtb zvE#qCn-Pc&EV~~EvPSAGObwjw z^B|J5i38}rXs-6a{%3O(Lo*SeHPGR2!NvNeUs&Rg0Ne#22egI68VLln1HVrlP5z!} zkgdTP=|8xrhW8olKlmu%DK`IC^ZVUG2N?(az65UrfusBSubqK=Ux%#V`z!$qX#v2> zfY9JO6?|KPtIOa&hQM_!e*hr>>Tc$4AAl-k3^X*+6Ojee2LSYY3Sj&W0B(FY5BLB; zMMOkILPSMELd8NsM!~|vKt;vCBgDbM!@(iMLcPC!7r%&qKcUc2P|(oPFwxO5anaGy zaUlG4k!Vzm2gn^7uaCP1p|PB zhJ}MiKtw`D0VnN$3ZVedFb_gZ02C|~3^Xh>90EKd91Q9+un-FdmJ*u{PDs%J=dryv zJ3MZ9TAA<@Dmo}s_LBFI8!rq_KVK0?wZ;>=ax`6bMcSKsp+2H`hf+2h6Y;; zdv6m0JRIk}36yNuuwV=9ALGEWdxzsfENE34J$goExZ?0WLPQzQ$dThaH9W)s1RBn^ zF|ZLC1jb^jTp*LxKPLS57SR4>!rdGI73O|SEPw!DbDzX6-YY`7+tj!Ch{5dH{D-BB 
zx~r+@*psBMsT380L@1tQy1(oA8dS1GL^=-2uewxoV{VJ4)CH3UPuRqqv=^u9@M8U429Tda&Geg-^U5J#8AcB5R5d zyaVX#qG#U6<|CJhU}96zi*{ZuZAs(o5@p^;$N1XAK?#7o6Kei^Rgm zhlMX+))J|&AXlp)IZ^OPWUtj1-vJV?mu_rCKB?!L4+hMv@HE_TUt64BB{J*~4tGXA zs$xFdy{$;7F1P~>6mMiNefK#tSss%8cr%+lY?rK6ReY(}udBkgZ8O9aGR#~1X>4hT zd`h(J2gklSg=otr!l9@026+>U0O#!hpG4$#_wE z*LZs(ou`j-&UPQ|8K{fdq}Vjf&F@<|k1e_^Ryf{RRo?+vmo{B)!|wotu4?+OPsl=@ zGMf&l56jzLC9Zl*y*y-g-1FMfI1|YZ6F$D80FwLPyi(*TDUw?V@=20SJ#f_YsOA>g zkr-W6uWM*d^7?M^I_``trt2}WM2Q*<%Xm-mIoTJ}>H-C*Q%N6#%H;|&)T z&5{$U+nVFqXBe2x8fhDHgdHdoTQQvn^*e_kr5wt}$X?&i={8%?@tksl^!U7d#px%*`$()Je8^t&E9R2PLt=D;T92anm&PLG(+0PQK zBJ^=AXySn{RBwZCCh-!6=Y*$7_CTGH-j|b8#FcHre_7dHL(m`pK4octIh8E z#k`y%z(Atlmfz#H6j0hByqKA~JeY5;seFz;@8ohrO^BC%RetKL$vl$4oN9cua_B$*JPAcQsn-3-JitkjS00NEG^EA3ImdU;t{D z^3n4f*mkUWrd>lyk5>TrUJSN}ne`1d{0IPt!j*j5I?EH-IH;LtHJn4yZ1rT^ctV47 z`+fZ$NttwhRYR2R5m`&(#A09Eq0)$-Ol!p*#396i^ByO;gx+YtQ~ia$Ub|0AT?oeX z@^@qFOIEYS5iXBJza8#-(aYAUv9@cA$Q8kEU)%wfBDJ=4G#o*UG+HYH4Fb!bB`^iY z^;kNUSDCsc&a1_2;6mpl0?=_b$L|0h;uX6H8g&VBI)$h{X!h1vr}EYKLX+K%R^61O zfHAV}YR+3aqW=0^XVscT$Hc7&-9xuP>iHZAUdJegriqy`naUPModlMWTf=a3vUPdzf+B6wHw4q1RtP zq2<^8T5YqDEfm%55&U?K*99-$L0E$9oBj9P{7@6-64ks6HxjvDNJ!4?7zZl-jM!runQ z+%%yv?ax_ET?Ae^_92*)8CofBH{!Nw36d<10bk#upEief-QF&Z6^G@4t{BrTKuy`N z5Mjx83NeJczAPT_oy>1WWo3IShSpe(I~GLiRSx*gJQtV>DOSsK)RlSU^lSl*D}S%k zsmPIZ3CQlny_qBa_>t zBy1RT<5v1_$qu!gLT~Qq@A?!%vQxj}V?FW^S8vS z)aOp)%eQowEmrZ(@@f&k=K#@1<%B%pfRmlBJ{kC(DP}E>NXKgva764+cT8y z5?qA3!TJ_4bw8Z4@7KHhp_{jcn^q)CJA8*v{Bf$%IcIg3HuH@n@|^Af@@6Nz{P7iX zRt`kluP=r916{`0u8M%wUv_c`zajTelw}BT&2#C(%(WgMh*!wliqzel<@oyeTP?62 zx0>ja4cm_AF8&;{(m7D+tX4Z;dms6- zON9zumu@gZ>sWUHmEu(+!vvUqD3@pRVxL&-*_YDgnO3YotB4CSQKHtV@veNV6i`d$ zi$DNeug;{_`>yD#V&@{;u~opv70DeyOta;3Qf*T`lBK-YkHf89;&6Xn)vNDJV00!Y z1dVVoNjKnNk6P?lJRP%?CmY?!$MP*)%Zr8#o$*tn`T5zaosFm-8jzfp&12LLvL#I6 zZKmN3Dw<1Z{D_k2OLr3|)y>+1KLC7Krmt(iy?J?Y$@lc#yJev%%FmptNA(2Rgjs~Xz3sE1nQiAH z>*6MQ1EWOpvte-`c;K}hfFF4PZ*o~1Fy&@-`uY^FpAZe5!Ui9U`9RcsQNT&LtRaAL z%E(GQa~GV+Wj*?zaGNPc#qdI@&aoOrtt(-GcBcmXs z!{rp`x4dDtQ- zqAy;){L*)6mDq(_U7x7&Dt=#cOC%nn(#Gy)DdX^Zc#Pp<`q`M#BatEt{*X*b*elC~ zv<8p;>j}{j!_yz!Rz0L|X!0AtZ)H43^lSnAn3-X>*s*QikKHb}4%}(#rft!CtmAi+ zCsIi-FpEcv9B$jFc5{xsPAcJD-NX;2-GFDbSNth8*Bfpy<30^zho?1nfMw5=&D&!? 
zt!I8EnI5vKgtR_K*V}6584F17PqK@~4<&L&&lbteE@^KVy~eK3Jd#s#^iD`aUAl*F zFXC++^%$^*=3WJ&{dkT+jsdz_$kp+89=qJ3>Dr-?IX}Hv*zeCBAHN>X`f79s;1MhK zNO{$06Y^zNP^TcH&**evH~I2gx99XbAs48Q-TGnRm}Mmoyr*By)T%^XLKQ-bMb*$k z{VUor&}DdoWWY6^+HihP()^d!u3O%N8)B;D9&~3G(P67s$E0)du-#+Mhdc5&H=|KKfZu78gDA$;f5y-EpeVYrRW>#Q`atX`&AgTV1otMjO#SO85CM6^|$md%V*$+%^mjoRh;@w`{6(<5-*R)Y67Wj}d)+`dcBrCxTmeY=Twx(8d;CJ2_p)9jGjc_Sfb z=Cwf2rSi6nz_!gDpydv*>y=!^_QLuggg=GU)9d;vrPwadTUz9}o7TSaf@Lp}g3J82 zQZ6*dhP&Bc-)|`IKVPVWpHPqwHW&aQFlqz@7=k}sQh>n`b8sC5z8&vt;19I`-2Y1p z3NW(73cev9Hh+Cj#z+NM0r2=XKsyK+0Qpu5847YCfbU;*@blq2?{6LygoZTw+jZae zfe-so`R__dM~H_1z5h1`|GNhM|G5VKo;;$!#9!DAx{85=nF$CC2SXq}2x@YVgUNzG zBL6^7kmU?O7S1M$P6iO91CqS}5Xkd4esZ6oAh77)X$W-scNz@#+B^UW07Yxi+uuS+ zIzu+Jc5)E10i#182-*r!|NFu)u`x6;mJ$&GW6Y)&)+P@qBr=2$2Rhh*!Ixj~II{R3 z4y3QNf|vxjK(z(R8h}71_fWw@XD~tzGy@+wa0I*bz(aKf;|IoI2dynYZ~x&tk|snV zWorrq>w|pcM=~-uu(dU@{sk8OPP}L)Hg@I)jux&a5ZvTJE9M`r+C7T<2cCvAwFUwm zeuv=xErgKws&f=JZK4vZXvV*}9z0Y`;R%)uJ}Oh2R%z9h)T`av!Nw6O!PIa|;JlnMXa zBIw^G;s2$CL;I;xCeFr+o2v*0Dh}{P)+)q{twk) zl~@m}`cLd3;ea>|k;s70ONfDw?ASU&j=nqu3R40o15QQ{Q}9=w!@QqGSpz#6Ab16V zyZjy)aySX~K?cD_fAhg?0EU#Uk+qXCM4Js{5j_l#{hP08;s8#H4q#~ej|g7M))?Fr zqU3izLy-X^-(t3q?cn%ZH)Q%ze$AW411t;~4_na*2!d>;KfCRZUMyuh10xd$M=={a z&|48OsQjP{((geC28u1f*%xGB{d*utro;H%69Nl^H_zWisLJ4%hl%5_?Eg2v(Nsj$ zEsR0tk|uvUX99^k0(h7ID*?cRr$R(o`62$0Oe6z#9^%%Y*?EW?zN`tzz!-e~=9lmP zV;+DUjX(isBM~P@@L2^Zr3W1dV4j$Ul=uS;52iI_6&}XN`9&%Nog9qrH-oB@)PoG= zK65Jo!8zpb?@aUutNtN@%n^pVg9XS$*231p#=!dbAon@uo)2cI0F^C_O^gg2{;};4 zz@dgL%0Cf>VBQ$%V1?jw9X5^}F5vS1OdR-xpV6KCogx7IbshY#3*_X*{b>vc_3I4@ znZVx{3<4C?uirZ*Fmw*ag`wbJz)10(HULLZP*PIi@4t7Sz$b1X*AM{yE^_I;0*R;P z>wyh#wwz}&XmzMUW-l<^D3`uyvEt$7g~@0qNf6{X=nn;XPugH`|GbhDfDOt{mkz2_ zbwADr={|kJqhWQ0#rZ+`D|H2I_k=a=O#o^U7k}xJJ|Q#C_FU~K+qV^3)1_UtS0U|h zShv!Do_@!E$;;9D`fO^QW`$ru~8MQEeq1P!jiEB%~9c&Ar%5cY-Qn@tr^}ceJ3{- z4#0V;(>~C9(t1+hv!w{LB~9?N>=To$c1Bc;%!~({+QyRv&Zg&=aZDy9eEo4vR^?ZF zrdQ=Dr)yzca?itky`C)c0 z!oDax{Z5HoQKvwt89BAzEB)A<)oq<6b3t>RjMVe1p|~v{mI0r!4ipCrN;W^ouqv%5bFq!EAi%} z>ZaSQjnchMB@`4%`{AWMrky%!tGJ$!51doc)~EbpXgqX`C{o{JYct*t4M{OkqX+xS z5|uJE_`PW`$h}mFWGeYyTHFTMn{h6Pv#e3b8++v=(_uj{2s> z44vhjATor@3(+@mr~L1p6(iuep2}C>LN% z0aYX50Xj++9-LqBYDz(T(t>kCrc5G9R-hKKmpD9l=W4XiERoz3d z$m^?PBsXXX-7lbd27G(g*cY#MK86G#}YT&__@jfde8ZxbIB1QAj$aNgq{`JR|jyVA9kNG5L@fX6}vD*DJNY z-J)obte)JW?U_;bxXz!KGM%xE+~0kzV0vrVTvi;xyK7!?-v7j5jQ)um z#!_Sj`~DYIxmsyS3#t*BbhRH^)LZq*^;fU03x7sD`aX!4cpNod<8>#OH;oV?dTw37 zZCPn?s`atP&vMEffe+6fjAmm;_70W!VlM*CcXZU={<+GyH9 z&-dX8mV>A`LKIZ70N=C}hlEIDJ=rv#loxj}X26IzP#KIm#yZl7xjd;Qzgd$rQucWqLg7JaFM4wc%1y>hhQ^FMtceI5whTC9%4&WD8 zz2gn17k5^qx=z3#=ZU=^=5!)Pm+ew?7+Oa8TY39VUQTHipjaoq=7UBzMrm^BDh6&> zpMoxoyvLLX@HhKG)}p9$rtd<`DQaMEpyA3rfE zFZJvv%JTjJeM^d2>}*=P1>NcS!U%EU*<%?w!TOMu7q7DMB*YGQlMc-jE@yXc6ZScy z;(f|A-~R~T)h3ucw`>(GWZQM+(?5aALl2;A{vs}L6~Nn;Y@UNj^j=y@RYJPbq8&K$ zoV0Bf<_xgFvr+-4gk&QlKXQw(OZ!&-po2?TR}<>wizk&P8U-VMNA!&e^-5Ck9yg5j zuJ-5XFy&$Iy;t9tIm#L@CoDhuePW9=eOVqyS=ssC3wFGhx?^jKPlI~9$GQohm!YGH z-k&B$ok(hEOAhaIA?0xmzJwewtX2_awiYwx1qbsj9wj_>Z*#`ilFbj?L|faEf_Clm z>{)qG#MH3)SzYS3mjt{V1W8N5W^7t6^q0am>(m{r*to(U>2L8`;T)c-ST?PM+)8^1 zo`ZbF{Dlpqazl5xU6G%jQ~PadRu(FnnWIR(tdp}j^;n$4>F*~ptbnn8NVQm3FyK_X zc7E#SAx(JGgr}!8CEzbru_ML$YG|>YR${-04y&NIxF0nJ!(M7<@g~>TG;Z|cJQCHp z9Xu1N{X&D_zPet~te^*AuE*#H^m~CiDaze>^i=8fZ4b&A$)zy8Bra&B$j1f5sbYP4 zSzNUr$8}V#povB)Vm%eJ?41;E9@7HzIS4n12T(lF+Lmc_Fx|Qe=@beR#%eX5gY%J{ zHeuajtU#(|+WGMu8-a4Vjlb*hkzwU~MapE*WnQbkOmfa;bh(C&_ReDtf4lEN>|`5x z?^Po%W(XAur~9Ef&DCTU=hzSRtU!&NMVZCf=d(67B)vChlKJvu5igV%QQAhv100EG z!dlvGlqp!%y$|M4s;kcWj1uNW1)Sd_g 
zz3tLzs<*3Nh;cHy;^e9~R?>fbma}!q0BT`=ClLi7{V1#0S-xZD{gkMpGFa-=bISeKxpZ*msyLE^*5@ z>Ef1{VTgtc4@OI!Hoi89MsjAZgX#CZuHa!|hZ}<}2w8fOAT>RtpYNgE?{nPtK3FJ+ zuXD;IOrtK2B+gE?M2;hrNT7F#K0M#T+t1SG_lF^scH zQ&6G4x@^u}*YPxxWq>HUTpQI^YtJ501iUXOR?valKB6{tyPmF<#23Fw@8jje#Zbc2MR?4>zss5Pr?E_iv8hG(28>%<}j zU>&B6Yli9OgkC*nP>VZpoVPwme$rcO|W#kU98R7m%QhzmLOUr`N0* z6XJw(7tk0C^E8IqxhT@Wz*>==u#+FMe(4;m=nx-Ckg}Acd-)c%%&_ucB!ojlll}F_ zZRm1iBeJNX+&~J-Eou%5J!4KRAy%w5l~l*_kq^7^JCfp|ZJ+CG#*<+NO<-*l4qwEX zb`0cG$LLRsh`?1IXT_Z=1V4W-ir&v(R%%^0y{-7k1k8K6@Icrf(5HOvNUh(j|F6VwVx<^ z=QdIr%D(LDJdlZ=WOmt7Y5gD*bvE#voh)Qv$p7oM zHawOynxCL5;Bb6N+}@p&pgN+KxZOZVAjYTAPAT{^m9bb#mPvP*PCGkh>JLZyZpli0 zEG=AD%RC!oL)39OOzfn{^P*3z&B=GV%SmRJvPs zLJXP^>He@tM7pxRRPBR^O?rM)y_B5hQyP3V!D5=49Ln?UoV|~t`q9_Gxu{&KyeNE8 zLVLg%-&n^p#oT5MZjn!RQjq=-UrLn9RbtdLpNZ zgnbLaRz#jr#j?*aMkpx#Ke>;Ici*{*Dzd*WXI5`d?gN}*zH#2aQrM?pL$ayrM+y3r z@JUQ=*sVOmHrp*^;4|D>i~ZS}=d*SWgp7f0`A22z=w7iGZqu1&`0V&08$&LK`f`sO zj@dQ85D6p5kKE+ss`l~DtqIr3mp)$xbXOB3W(a>;79>$E!ri!adjw63T<5Zk>}~-x z1T>x&b!Q4lXW@rTT^dydU?LuH9~6ip32sv}B=`29tUq_EY26BRws{4gYT|Jgbcf%g z7(vbMBz+Fuel6EgfQ4AQnqb!Aw}eYv#GG9WRS5N7#RfM8A6e;RxFY>TLF!t==5l37 za#$PhIY=@FZX;J{l{Ans9&bnVsO-ezpfOJoQ=?#=$%hOe5y-CjzHNf2${i5#`erQL z$vCL+l`x$}7Jbx-T1RSG{zX6(t&ZK#_M>)5?Ob%aA93^gT}l13^Uf2vlRBzv)$#O% zpSi={5#p<6&uql6@%5}+R@1nLv3h#8FHwX4o0#R@a>!G}*DIyt&?yvWqBJKT(?9a&0_7T|;3ATK942xb6YsPl?mvEF-+5GmLDSA-DIwQ9WsxQPH}a>VA4E zTy%(6Olh_LYlUBsN}~tyJm)CIj?0-$|MoJ!Rd^VuLGY&A9Nb?T~6r*@2FXcDHXa1E4kZSL5;TS zXmV1g>V4))KV2&=txE!RGRA3r&b@BM<{ZKGN?mj`%lA(g?W^wC&rnmQhd`1a`<@sZ zAo7?RY{@3Kdh+3gGL_@2k~o9J_Hv+$dy<@?F6p^knuW^My;%f1qT`5V5J=glY0 ze3X{CZa&%zKL{=#t}{VG9{$n8{gdtGI`a}!Ht=IZz7PXVJs$VYsn2z9P{r!|R$KMt z?u;PB?Q|)(XH!uq6(th(>`?A1Pk?S^I*E~Z8^qLVLSor8@GwZFg~mR}Gf#M67$Zkm zp-w4yk}mG&oo_n+rL1kpB5rc?((W+4LpE<%FQH;xi`KEHpcC&GQ_4t`X!{pO|ULOmy z-V)~{W9_$XazeEwTtt_3zoKyhdB=5q90)5HhrIlcbW4X z$Gl@`N7^hk3}OMWEyR5=F{E|(mUGRex1>ATPZWqyEkoF?B*O0v;eb}>( z60mlsyf__b@2}J67ABkewzcKNu+x6B;rA`}WZ8vOcjT(LvHls_wq&#g9;PkU4&Oxb z$Gr_+-~{T#CtCkXWfb$2A6$hB8u*9eTe!Qd8}F|#3Oenr3e!wJm35vAp-L>MwOBSZa z(+;|jP^NU!bwEPQHnYH`dXLeNy5rxyoFs*xS)_@-R=V&-VGlu+fYytdLBk_D{3vw3 zfeEQS)5=(Bav{NYtr>YX4U0fyTN1;WB#2-J!T-JGfxZQ0C=5Yjz4P@otu$>SmV9t- zTsy69hd=sLxf_+~&sE_fdNlS?%JFYsYw(B)^1qPNFi^vvF`h>)vORux$5No3?D}^5 zom7ur%GUw{+f41pfiuaRLksGb*E{O#trf^$ZfhSaQoaCvvyDbcJ+iLCsk6i&1L(^0 z8=l!MRcB5E*b~#{soda4ur7FaDT)K%Z4{TQ_j5YT5RiJX(lua=oe{K z5D@b*Jsn|06$~lu zUtrMGDG--@RxYb--9zkdG^_X}Vg=Mnhr{jSbFxXPt)%!}*PxWw;^6W6?Qgb$FFM>& zriS2VE|S0RUWhJ6qOYE1J(^3i-T7La@UJ&P6VT!JziyCCY=nM;;_9AQ^`&8$I*=rE zQI2TCm(!TVv7#7*{fZ?#Mf?0D3^BIdST+I1iuDR4Zcqzj4|;(kAs7R(XB0hIwHHq@3XjE8wjKnmX# zg6M#CgO#aBQAF#om+$s!swWT!2B|u146k|~WD{RNo~i^3&_jB1!jlxLlOH8jcB_IF zid2*Qy`131ylV%mQN`QLp`+Sj{(Q2vbvC<`7@YbBb=o=gKV7);d_p;x`eJ zIwd5c0wS*uJ>P}&mwknrATB}qFy;QOjC0sgzrp<0pTYg;tW_mGVPJbd%N@XfD`1oXAaEUaQuZP4$_V+l&Cl1tz!dFzJ{?s3J}zrhwv z5}a7MQ=)fLujFcU#MQOcqDxEmR+qkgOj66uWVFo>h2MkH7|grIYRt{K(Oog>zzb)L zeWSsDElR7g$sWQ&5i`-^fzlCamqvl7zh}2oDW#ptQ&LIu`Pc_7{|?4?0ri0Jki6>y z--ZKRjtAM+fe*_Jwp$jfyM~P&Ex`$mIQj~&JNVB8oFNAh#S+2*<>npptZ?goVWbH1 z^b5(f%*SLOP#avify*!inY~y6Oxduc$eiF{aLkQ@F z60JxWgW@GI$S0MewSMXOVH6E!z~4Fk9Z`sG`0UovTTgXML`C7o5(a;y5hV^olIIt|VGY-DvHA3O2}+LoyMsF~!5@gKe{}4n>sIEA zt}?Au%0#{$h?nb&f*C_3rjmn4B1=5wgunEx|I1Nk<0mLJBA{VEm#^N!&cmml7sIgN zPe0#pnrLsoW^nIR0t@tk1-f3!0_5`}X8YTR@{w|SYJ;AO2{$s0z?w|Gm5F5eNDVKv z%I3xR8yP^G_8b8X8B!4Zn2crnlIHE>!%S%A5&508pkaHJ!@i3)LLYxR#&dioegb~2T-j@rPZuT*ju^;zdg9OkV5PBANmv61uk1O(|ezv~xFIjapS6h~WvlD^cPwZ08 z*VmCYcN94>b${oD&uFAatvTBKBk=PM*{!HJ5GgCKjc!J4$c3DSq_h-~WGl3%#9;hW 
z`COu4IyI4>n5;u@T8vLrT>HK?d-zK-Zj%^)y5h&|YtIyal7ubsBM_nGzCrrTF`akF z*0HN3`lbu<1^F{!?2zVFm#a`BsBa@6*`f@oT8?uf z;^}q`d#T!Ugzz3Y$z-3Aw!S?vHOTm+iZELNdE{OpZ8H9fr63bc0N}9G^6Xlq*groaXxDt_BHfPeIjE-S6`nWHjRIF8kvyW zx(7p8r`7;@mhiyEmg@YKB4$9SvQw96PD)jqjJ^Yr?Xwf6`mQChwI80=Ys`f=yHBBJ zuJRWID38kEC1^>2LE;-AXqvjp(luPAkkdD9(}PK@uTVIzGZdyW(^vxaDRgqLpl8$RY5d{b z>?cywtwQu6#sH%J6dkrV?G2uOiZ#}u_GL_+FjlzlX;Dco{p9U3$UYvU@gZny5f4i^ zDN$M`?4ZwGX4~G{X>agWNqw)Lm4$~IA(Xs)?mbbhUTOHvXMI^U^VAB1=QB_&C584T zdhna}mFinw-L!^}H=`&_akuFJHcXc-zt{R*8z#EF1ZXM#mr)Cxr1FV#FS0l@= z&fLY5+ZNXshy$&hJ{05*!_L9En!;2{u$@L~U9oT4G&M;^kl*0a)PyH8G4SQQN=daT zhb~Q=Pc3_u+NpfdGLMZca!pvp#uo--_ENEOWcp{fs{{)wHGB%Wr0`sc?Aq6a)D5gh zNKbD|4X1YVyPqKkT*W)haedlX!s>88yR?M7V7`<2U)Bkj40k^_bMowwk z0<D@bfw@JwBEEp|5O6O5W7i29P= z-^QRYHgv&`4PPnaxsG03H&GG^LV&~MWqD{x`ewTct*bc|EaOM|l9z)q*}ywp@5p%- zH&!Sf1$F){zSA-!m7$$DMB!b{cMY-_Z58%2T5M^y46A5Zoe8C~MY;#l z^u3+PsiAX@hY6*{*}iO;TtO9?$H6GUFp8dB6z|6sDHnf&)p_>p4CsZVLvgIF>e1LbPdtz(W; z^Ph{$UZ@$O{*2*S;K%VMg2=6k<8-clyOo{E(Ud8% zeE_wxIK|F}4TygfFm~B&{c%ei)@z=XMk`6i>{(q$QmMWQ#{0u29coF?hUTgv_8Gp4 zW##^aQN@=tK2?*hv5>vP*1uAOjaFO(?Zll)wgyIObkejD@ z37amwvm$KOfq~R`hJTBTxyt&c4`V#r*{e#LfA$qqusQyXawP1Og$JwH)D()1{baFnO?GY8%g5dq zVs%*Cq9(e|8t>hYUb_gq9{933F#i^1Y8}*;@}BBR8HWz@XoZ*0utq}r4C>PlT#2JG z2IC&h(lVPElY0HB;xB83#%8xjmtS(Lw)DlE+29l!f05jJey2w&WAqbeXJtOUtB<*X ztWg=^N47oYokSu#tE`?{TXGrM>3h>>k&-!GN-<#=>MV9JMnf_jVH3>U%1@HK+ixq^ zbK|$6@}9vVeYS8~siV8?ed`mOuBfTHPth}HefHYSeY8H^w4oiFbRS_(XuOj@QS~j{ zjd8a07lh;Zp;h$U!zu@vmaB-T9R1K8sMANLh=#zPxr#t^GNLWY;}BL^HPT0<5!=2I zi0J2P3oo52y3=fe$TLY@J6qzpP*LcTs$rUZ8)9Sz{JhNqw}HnRyHeq4)QC{Q#jqkR zHLJ>AoI{_)Xae#!7GmuTeaO_mw#|&hrXO53{0LY(O8K-SS|@5R9nP}u$LPe<{=$`{ zKc5>)Ga~DimcW2yT+S!AMyxZfzWS7tVWtp-*BhI>_Des=>ABjAvdGgjwbwT1k4@jq zWF+ttb52V!4!SmGXCzxx>r-l8dHc{10#a6Olc73|J{n`&MB|q5Ca$DljEu-BzQU|7 zWB$q9@%%Gx@Dq|H4VZv#&=08sR=7Fbn@AkKNHIK<;4y%I^olalFKF!29ZG%U@_LMvo=Mg3=urC4jR?R zw>m$cj2T?+C_1n!BNk}7F<1Fj#p@3+SjZ#3YK~qXH=J~OeD~VsTVWANCc{X??xf~0 zRn79O{~OQ4?$@!RvT#)DS$KEP12$dZ0+_KqWJY?h+?o%52+k?Np~zdlKKdL;J`EMM zXa8Yr`^{9;YFSORP?4yDwj9El(YAaKjzQ&jy^m{pFZ?i|gN_lOGc;W1QCL*sdY6M1 z4%~j$@y%CF9x*5PFnS1Y5_;LVAj-99b{3yU)ERN(hZsbZg=tjsw>GlB&~#~?jktA4 z9l+lo31muj-ykkps;WCTEWz;Sah!;*x5EMO{NS{et%&KIPv730w$R-(qeDllK4Yuk z*+bDGjnTn9yYx_d0wPDnm%qWPTB2WV;m-YDNB?6NItskJvS%8ZsyH-L8DN^T;za|yIz03lc4eqf5yM^5`#x>jInuJS z@(l?dO3Y{mm5T5$ly5)JBG+F!b1fQf^3>OZS}^B&3kp#66A$VLNPLoLXI$~vB5R#X z%*{~AWdh#$?i015WLoGX-{7GC3<)mcC{*k={ITY#+Y@V5y2Iz|5l;sa&`Hxsr0cTP zY@2TV(Q=ZgVO*Hh`wvs-6tEi_9U5gAdN;)DZ$1o4Hk z0~xp#ewL`DN5VL%AupI;HDe?qSx9GaIZvtm7#d+*>)L>ScMEMCykjyoOThguLBo8d`mKnOY1zy zGlf%o?eFSeWp~(QI@%UhC$XB<&j2KMZ5&;6*$)Pb2#2<1h@Gn8aDxR7-M7WNRo{jh%%W3smRJ- z`Hd4#kh>XT{~rK8K*7IV?56>2V+(p}{WW|cykE^vLRpQw)#M(15W?)gIs^XM%3$%) z_knVVl=@12wyC~vy>$Z@$!0;6uOHa^mpQGPJ+B>r(PXbyns`bE=T&rfX#9vPoR14@ z91_#EdID|Nlc4s_4+7V_k7hRUkoyO)RSqK;7u(X>KPnxdjt%m;J5IuzyZv()y6mkJuEKt@HhXqqbPavU! 
zp?(wj)KV3E$4&KK55qbwzy)EDAS2&fkmIh2QVMLD1jRu zeqgsoC0eACoc!jqwyf%fh7thop-@U{P7*gN&fu*)#A(7CPa&6uo|gx*1BL9}dJlDX zj>RssyM01jg9^z!pL$aORqmL<5!+k0JSec{d5l1d_XSytWn@maG8}Qqv49otHuj&S?!_ z*``Ptzwr-s_?X{yFShE){B_QY6Cg0hJc@)oy7{mUhVdYTzp7NHW1t3%F96+F(~{33 z4Ve6Pur4?<=#*mWZa_B&7dIp!AM1~czVVxdnKK1!^5M>>F^$}O6V&#UUDJN_??#8(^WNY;Is z+kR1xF#?WpdoC*NMF7|HeP_1rCQCJS(m}7GiqZDaX)Nfz27V@zN4(5QfngiPiSok4 zPjUEYM&VmrjWKr_O)4I9)14BeHvF{Y_gKOi#bolTE(1}Mtya}aKJL}@UQP1=v)Kkc zMs8O_O~@t%;Cggvxw3KQD=3=veywQ-oZrrE<&79*SOpcPj%Hl}cFJ*H&5h6;F$}q9KHYUR%6vkGol1 zKqK8?a-v{DtEl^T*juj`Aj7WRf+U{Ka$El|Ju*aWE8tK5!Wv=>bI}_Skd8JHQhfs< z&9~GZr4o1D;vvUV*EX1#Q;eq3(+e7j_?4djG=Ia`7=tjc$HTRJan&U3Cb}#X)J~AX z*})cTxzzREjDadJ(QiL-@(m16iaBZkS;0@!=W~@|uC~nx+$X>$-Oze{+o%sx!>>K2 z8nkHIUt+wCX}-EC%2dMTMYJr%Q?UOli5Rpd?}a7w$VKA;CwTamYnSpI9OjIA16(K# zhjVNp#zMMJrAy1hxV5WMun*rux>^)zhYg_L(jeUaC*N|z>X-+O9bW_F((th)n=QEd z(^sR~fIGt~qoNXJt}B%Qqv|D6KLXgB6)M%|xg#GdowW^DF%G}5O#`L}oH+v@SNLW3ZC-%B`6i@1?w zs4M5#slfDPzKr;=m5@I9QQ9M?++M3G-t?Y6&LXf_*fylHz{HL(bvnOvDx9MtH;%)l z#z*d7H!Ek#_Tg6}I%qvRI#iNI>;HLZjQReXo~hz85J0btM>~1(4b+ay*m-K8BBr|H zv8aX(sM?nI@x?dq(7im`YeylV0W=1cNpdy;iRebInw$c0_)q%@C@=$b4u*LH!*V?#7bpD2=qi!V z2f4yxHb@>3Hff|%5<}JuYp7ya@CKyKr)GT9nq9-5*`z-CcXECYXL~)o2@?li>(2z( zTMlJ^ymSawOj<)eQ%$klxQw(oa6)kI`_{`GJ2PVsjq%(>X=&hk0atxSGR4;>q zgzpLrtTd_l;Kp%d3?cr75Jy6DB5rUdMhJmTW6c5^%~TfokrLXj;jE1UxUK%krcEp3 z4NpaDReTlAPfhW4sni>3gA=P0ARt=ma*8IVfiZ)v7M&O7XW~vR?onlrj-bh6bO!uq zt#Sk^33C|^p-Zf5m?!W9urWK}s3~53?1vs7p@8h{=47UJ$)I0|%y8rGm08|&0&oq|st-}X<4Ted7v&3a za{gD-LLEN%10)OtK~`oj@3*|pa;vyW0$W@%YHOmpF|7hK|YjbSN}#wAmiZZmWRSuG%% ztTN}|MHCb2k6TsRf#j}Tkju^&;OlY%PLr?|-MV&4&k{`os!fGw? zY6BRa7AJ52SeZjM7Sb&kb-08uU@u{*-+gZ*VobBsJ{N(8&w@0U_JDzBa-s_>*(FF`lf$>L$7D3L-uS3UB~z+zNHQxKE)h9)@d9im>=cg|KOK?oU{2`7SCQ_xUS3mo<1J zbK4oulh}VOrfhSrrLQhB=%S`WIOh_hR-1wMhdBmDY98>%`^PKXMm7L_I!w$`1Ds8! 
z40*g$ef8f;Z>XIy0+=iQ;zwE|7-k5$jgTmQ-Dyy%n!y{&}(J_C*_WM!`q%K5@DtRBQBQe$Ss%q z`TP<9ZD?uL8VRI0l4T z>;GE(8Ui{DD@T4;Wj1MjdnMRUNd(Y%F-9Jto*rZ>0zvWx|S1}Xgce2@=i<5bhTtJ#lAfj^(k zI7H)h$hIVN-23CSSYr&$( z61SqyTXTEb*LJmz;n*9@IK~+cmy)ik)9^dl2#AnjiTw!>L7QlfO8U@S%P}rQ9{+=W zwTe8?M#V!}BHSwLxrbQR2ekvLJ(3jfUl_ka-%SH4><46wdaQ9oiN@J{7N#?B-t}@y zTZFN%Au<4EOh%Msh~J5rU{A}`5NAONuu7i9L$Bt_`c0m(Ih$$0`bJ#SyVNh~a@-B| z6iNknPjGV?7;GPqNc)fl!QR9WH*g23hrj^t)h9u|#m;~zjoE$7Ya9&XD8@6tqRSB8 zA50K)g#uEX^l;lC016pFk&n-AzX-sDoN7WoWKU1u`P93p+&!8h@zrvr{>ySjapjG} z6vKPy!4k9t!#8)q&j!-%>>MwjuQ46Q3oc=S~WVi_uceDo;F9~rP%4(rW72^Srce#a@{^RGOkypb6OU(+}rgi=K&{G zg~zhJAB(WF{2`XNl%tL!cz+~kYguWQ+u&WpYI32P=ME}rl8D$?_H(FUDO&xf0jv0l zEB9jF`Z{|-Y4N`!QEBO5M(1Yw>h2_gv2ol)!bES+6JM#rN~*}By;Z3iEQ;F`1TYY{ zz?WIMua)>6HSc{uHT0DxNN7yyNUZr2rF)ASPhZ}PkadAV!>#^{UClDzY#)VQRe2v9 z8gES34XO7vIbDv=D1sNFCtu?72B=`ty-S>K3M0K`_DbgVf0rhm2Tb!D?92m);qWpJ zs4HDW$j-_1IaBwzIa6S|!?xxh(ots-*NESvu|nb~xlCXvf@5`{^)}Vwo6_s;Ef!Ug ziRj-YaDn>BgsZT1(ix7F#qnh)j7;Cscq=8+dS9w#*6&}mQd9BbPNp)|4<2Y z{yfZ7lEPK5-J;Gvjw=D_RUidaT0N zz*~Kqn670^d)0?ckekx}j;m8^=_-fX&4g%V(vIV$>!0nBV`mDH-wR6C@^HVcwt1^x zXgVFi51AC0X8TrHE%bpKl6XO9$) z_*-z)^^>M^qEC3W8Qe+#M9DJiA+)b5+FC0~1X%(1g}9?fBRq}93;t|8BihV^_%e&F zt+`Icvju0G%566#iGlAXt9hKZNV?)*0vKfkBql{@ z^&0ac9>+u4@utAZK!}7N78nw@LfL*Zpr8-vA`9t`Hc0eBePLsolW^#0%qauLUm1%-RE zNTKi+8nnE~*Bb|CYtMH`#px?1Z61qZTKe;oog{~30tP}EeujY{B> zMY?Pg&bw>$6Md{GFrZVTm$+%>u#!o(EDI?cLxwdp_yc%FIA$X;Kgl}-QELXAF5A;{ zvVUR3j)neX_xxix{0!KePI_#0Fua!|@XVKEoOKUyJRn<>+IsOa!jO>(oNZc9zp&^g z1c?q1kV0Q#&QMT0OTbS3#T(oFpLuuzZOh4IF7H1dnTkq{n1^Xeb`W5s26ZwrTM=h2dm&D)|=@GfPS)|KJVjrhB(4$kMQ=j^ewuc_>e9ud2wN<9+Jly zf)!3uk8o$q#YSsSGCHPxoN@l?!k0QgxA1KwgHq?Y@j1)FkenfUmc=Ac)dpUFYpH(* zQ*F6&QRR8vxJb$Z-?0Vs9pPyMlp0TVsja1Y?M}!D?l>3K#k3w#5cd}t&&<{w{d0EQ zao|+<2xj*h1TjP6gIrPVOikzdP^n6+Rv@kU$iHJNf|iFa*hA062w&h-?hcsFTrs3q zs~jT?!Is9mM%wur{~KgOAsj>G-H4sKR2itI$dtsQ?UHy{?Jm#`@NqVgqk!^Rk95JV z35}pI*>tTbqy)#@#Bs6V6%TGe{m%_scFfI}CvHx?02&Z%DZPMvHqORbsWdjUxnHn5 z$+!C$ee;OfoQ@@HZ=~w+eM*yj{0SQEPt}R>Uruzmr#*Ce-($^^ngGh;!>&m>9h1!H z{n{w2Y+zq{3T9@LhG8}zw0HfZAO{Ku1Ht6Q+!l-gGylk`L*G^S)AJr7bN!#AWd|2ac_I`yWr z7E>xNNY8Rtkj6BTqaI~$qAF@Dm@zJc?uLM7weO*SJiAHMm4vl#a#?R5EZ=h&EU@^K!7>*gdA1Pw}W4&3K@6D&)B_^~x^nqw-@( zk3i?UrO{31@A?UEt;gh44&JyW9EDf&t?bhdR_(O;-8HigY}(7-b#y&<0Lgxi_u&t+ zNCvsA6mY*jsVY$kztGe?Exkd^f(^rjof+n0g+1J*_*ZnVl%}y}OQ(jd#w0M=mxg;=3wr*YEXMhZ^gk zeQW1=RnhS9D|`Av80j(;lITTQ+4jYOXqVYt;M=v`%b_CRq`>00Lm<_G7(93s!R$P6 zqJGAOiLa2m2anr4SS}uEZw43YLtcbW{OA@3TR5&9bp^CVCw$(t*m@~nC=Iwfgvd@f zxE%gm2<6?4?nd0^L9FmsNs%lLUq>BVdT^ApoM7Rie6j1$XQm+jDt)|wtsuZQscskG zpv)?ER3mzpVeBa;j$)R%7qrp7T+aOkS(|{ov~)m@|5Q*qaLkmpKL2eWAKV*La1_eH z=?K(6@N zf^O?U5A3{bz8+#!oxBrzS=?=GG?i~s!BL<0a|@D5fHNTJKr^5_JqhJxj_Hxi^>F#S z0~!ANWY=H0!uNgQeYuTY8XIei;{CPGAzyJeh_cb;b5o%pA6>f_UsCf8t3yq|6QSZ4t1M}<1F-c4{6P5ADEQ{>yl$<#{S`i5$RfVbyPk_QpH2-K0{|PA z@|&ucFdyuJ^14iB@lq$1Ke@_kei=^7J&LgUXkcn2^Fi(tIY*;*Rj`OFGce<*32S1p zP8zCi$%}ymHwi0eDQ%82x`Yy)V&<5LO zQyJaHY+F_vba>S~v-OhEBLTmUV9;2ip|5cdB?f! 
z!-v&mF-W`wB3Ckv>bMi|k~B0lh!Ow~wcfuaHdXI+SHtqHM#k5WuTp(5f>|%|<$b+3(L`Pw6_|s;dX;!qQ*CO+t(Duh;#GWvV{k*f_*q#$y$|yFl`gR>_|*Cy0@k^WMM^xP8evkbz1QBswwF-X_Fv?T{pb(akA9 zaPbf4e*DB?hg2L!7=m!Tz*Lp(lOQr&uA29{4BZGX0m|@;KBwqREnn!+*6wSF24^Z_ zuo~g1xom8{sRA=;&zg|`9mv=67$pxwik~N#wo2J`LyM4lBakL$mw%r}q!o!HU7pgo zSpBC6u&Ya=F4QzgDa&i)x6ETP$b4|^Z7X4XNrYG;%QY7307=XNmjT;9QUqU&=-vM> zqmqW;{wgpfyB65R9tp-L6GGi4+_$uRDv}fON}?5*NzTD`W^;=HRg3^&Gw(X9ai${s z2&I&LHy)9J$7w$)&s|vWP2@JQRq4D!MY)2gNZIp12iw%Bx=0dnWYQrKpn?MFdg!lW z**S>Q*$g1 z=1yUP;p*^p!=phhOAJ?%{NxYoz6f-a4_M3odd$3JYWhiMRz@+?&ilm|HGmocu+*bH zQ=6)i->`5N3*(EEp4(tpp@eSA8l@@h%&^rz%p!e^XFeK5w|xSHR(qdNw-(L+ancF#DYQW^a=4q*GCn`qACl)UmRks-_$g@W z&|5p-p!*6u`$jB=`psc4=(68yig5C$5sLdk)Rnb>fjovl$T)B~DGRK9SL00>RtpwvMie4>CShn~o9%*^^qfekY1GLQD(U+v?(lWN& zQF#y^yP2$SMcjpZF*hWqS9}SNV?)>lR`E3;<6=h#j7u$paWk}0J5=jZ36|r$C3>;R zWjMzPhF!lq-hMu7$t`{J$y}y@C{s*7V=#)YVOIT2HiWY3EH8yK%zXC*I-E%MQ?kEj z5m-;^S(k$3yEu}xC3HLRqG1d-V-qcY#|hpRTE)_ED~J|(VFTokb+^ruHspS%IzRCq;1!l?}AqJ1)h3YywenWp|rq9nHf1TQGtKmj019#Kt&2UQVnj zGQXV00)%ptNoVfvMunTM-ovIcfLnM_rTwxSxARbxAa&`{NulUHELhXi7PNp>G@$i5 zVbj>nEzhW@i#x$8l$76Sea4^^t{sH1ol2B`L^t_stoEQYd-X= zGf!dF$fwaUqndY4oMQ!+=ab7K_K@P6d3ned^LRa^ry8PDTX;mcHtJ$pCpCd^6j4y$-$kV78#m8gk~Fi`S#zjaZcY<<&iKEaP?Iq zV3@R?I}+@KSuWlXTcW9@_rH+vKR!K^j4fD9%`bW=8ey_Qb`-8un}~p?>*1ASgygI| zq9HxL01Ume-;|mU4%+jZAwRkw{sOymnrN3O6}~u(t^kV?mkFe&@jW@;Q*rX`FiNS0 zJyK5$`BxuCq6kVNwasbSMbs&|vmyPwd|6g84|wJ;SvU1@4b5v#hjEPQA5H5 zJ2s~JPxycgTn%`UJOh`@81G;wY8}n+Llcsd7ptW9Uw@1|*U*(Jf=u$!tsxDRW;UH7 z$A74Qg6oh8A8Hd8$3~~{u>os`&~k2>MNB=%LIJ#`RBC2@5^?!>d|J>aXAwOg$9HDKu+IXca3|1a~`@L;t6cHhKGhhe7Ej9+;>gaQ>~ zUB$VgZiIHJ9im8p+5L+rhr9FL3zQqiRTMIVJD znI)_%#NZasQg@IAonPu>O{ZMSn>MDfIq-T4(G-?Uf4i6`BuRxS8y2gr{b!kI(fsn<1C!|7?r#roHEAtCnJl>8!iQteuGl(m8s8$P z5~B&JWP_yNuf*%ZOP2NGBcUI>`^#O1m`3ViQc3}C{=FFl{z*L?iq15K6y-k$!(u^R<^sGLSJx6dbq}pDcNj)+)I4m2j%m<9=_K&O( zS9sv=ukQ!Z~zFntHQ$2m)}`arR^;f6?VjY_DvZ73DSKaM|V8*S#P) zw}_hiH7kPJ^;(0z9Lg2JjetDM5N~`0mHo%_=Ih!lKJ;h5Q!7(4KbhCaz)Gb-qSqFW z)3y!7m@N;r67@Rcs2OA=xp3cNadlSFo_>Jc0nCP~IP=5`g5pwm{}^p^k*pDI_&676 zi}{`POV_;yE%(NE4$Bk+1EQR@$zU9sM{nohyQyT}n;y1f`M*5m5Z*W|sfS>?}Ep*g>xd2(%TGTMEw_EseJ~E5J1Fs9NR_zPprv*FJk8}eB zeG-6F$63^IgW7nmn-=*?KswRC*aUT5)27O3gP%QWvF13R-ka4w(TkWXD(%UrVP&4U za8VzF=uEL!-MVT# z&xF|QKOYKq&b)S#C=84gCFy9%RRg!^>Z3?)5t2#sR=?jTp34{8d2IHdAlY=gS7T?^ zdICsde5~uAytIYdmRp|cB?lj zDdgqj&6{90DfPZke){5@hFdwTtGLfk!mqqdwf%C4{2nt@ug?J#M{z;tMQ-PhF~g@b zA|Y;bC{$7u{h(;1nL$7aX`d3gd8y3YQ382VcK>t}E-!TZVY_s2T2QvZpB(P#30Mc% zN>J`~a~Y^75m_*6Vg7=u zU1_^nE8ZL5dZW`?NumRY=wvF>4F@-_EIlumxgnI$kHBK$H$BR=1q2Kq^#?2~8r zqLXLezN0VpRBnvL*N>GtWjk6voM3e^(+DMc{~Fb(H~%UI-n(q^oB=629xltuzT9cI z(={BdsAw^xG}9kORBz-fE0u%}lZKB(xzOUbdNUm5;jnzp=4y_@!m z8~>013IG5A0H2%%2LF@zchA&M)v!P6-cQxiAHP3fub;nPtE11=)2Hg_FZ1-jzJEbq ztElez`=k20H~%X~=gB~yBgyhSzIvqj4*1dV_Rs3rFZS)1?cE+gYoHwqx>aO?>`Y_uD7f?dVm-5o$lxnc6gB*{=gPEJ^pJ+brRSrZMz~tyMS*+%z|%+>ac@F9qpI2jJZ@Yo}A&Rq>8stD255!uMV+2=BUwp~IQUGPTYzZB+Dbd!?=&-dwE z+e&Qvn5NzTE4stlRnl2J4_P-ZY*jVz$&2;S=%IDTY>KE6)K7mEgA+3%+1ck5W6eoB za3#6%ZuC3Z#*9B`e5}X|hKCeyhUD%cj)bY{s_*Xg2M84|Nt1*nqR29mCsCW4^#ucV zGVgKkzOBdbu^P5QG0I=YRo0hBb`a)G0m8>!X;ybXkZ4a@RFkUZnRah2q9d$mb>FBF zX^1_bdyPEzfJIose>*N?x3r4^9xJYwb`P7h(Ur4^1TZ{1-37oVrG&{C+BtJaTO~+4 z1AX3p%2aV({sw2kAK%vS5E_~-WW!VaJ_--i;Rq8I8m?H54!`@ll9Z}WtUzNmg6GYR zJ?O{Csg`2oRCn2DqlCbrZxy(-pQA?bqLkIELnp79t$TV9dQOR(4E8l>GUrSou5Iiw zea`5g?e8F@BhdOi;K1$~dsc5^1@5v~e=J=QwN1<2-6{meIe<(wyJ?cIe!^-|%??n~ zNmU)RY)6mhRpgk`M0_T1E;6Ck+MqVJOizs>j&XXQlA0`5Wh{QU1_^89f 
zuJbLsTm>3%aic@bwE9S=<&+GCb_c61^R(L3$?)##XaKcw)=;@3ArIvA(Gv5FoMXcNGJ*xVG;QcO}MNw{qb;$BSeN7Dpo!y@ifHEZwb%E+Ys?JabdrM#U zsE{S_p-)Rf8J^n3?P#9|9CE#p%*1>1`s6W$vycx$0J`2WGb+YiStkWOSMn|i)_KmV zVIzy`LG^>NM-(f*a@xO=c2a(ybL|_Yyn`RStPzKy5Gv}1_%H4Qoi*xWtFEV6sp5;tKmqR?o6PCzfixXykr6QzdVOdGk8|x?Wn% zEwQb2_GmMiy3(`uaeYe|Z_~DdKorY>?AaM+kp%RrIT~F-N=LW^=a=hAM%wp013CBE zW08wzYNLU{7JJ0-Lpw@+HZPkPSDdwcRlL7j5i!*fP%g);+=}A8I0Q<^_<%YB#jsa^ zDLYC||ED$w9AB%;(4`pG^E@9kqKze&&{y+8or5^JbBwg)eNem-?z6$Mgc|3NvX zq&D(9;keqqS82HADR=$V>SLL>MI`ce+L3piyFllK-9DLR_K6Lr9qewK?}*#^09t_n zDp%&&8yDRDClrfO08Gfr$QM{YH9F#zrnT;jcYrpK9QDP(Hjhw?u{sJ=%&DguGq8G@ zEP9bXRd2*rxCrMpTO83_-`iwR#260y*8wp8&3Tv~{Uj@=C7&n$ril`2hx@DD{$y2ZG%x5O5~)+LV89wvH3)R+yIhbN zRT{JU(&Wp%wvOE(rL>rG&vP%e**m@j-H$&E*Vae{^sAfws2>zMD7H>3%ZjO&}M6u!1BNs0Ym5wMt$TY1jZqf3V4QqhM(6Sssb;YCPw)3u_7WZuE% zMia&3ny!@YIr>zaPAKSe9^t)yTTxzlRo<^p%x{>n0asSYYr^+l!i_~d9mYE8x&K{1 z+fW2>_inTY@L`bKiQcoPCZCelTCMPaU`!nvP+*o`RiwB&cG^qFvPUfMLfR@w=6%sqf)Ao^>t%KT@-o7CZOD2pdMi|FX~ro5Nepa8;^; zaUE~%MKymU$p*rT0s`&x)4~&y5!y)fYc@Jdve9}F(`ck{PJE7bX`hAbU(M&@!p)Q^ zU*qk_uij=F*%oYVH81e40X}RucULBI2Yk49cNr!dsuVfV(zu9t;SUcn9Q&23+cp~H z4(7*hs(P*1*tK2W5#t@H0-ytQ8J660D(Vp(u_f&vA$ZLOL}5l4p6$fNbN@Tla1`7{ z&(U4N4=TDst_{L?UTXE){ZsLB!|M(Nb?trx*i4U{JHLTndAr)$YilVYujP}F*gx&# zkVIlW&LMeUD}*1SefxzX+499;gveI290E4D&=!655?HKJ$nwJIb%(7QiUNH`btf0H z9nfq9gs9d$jO^~ol5N;~XJ6p;^M^bgIKt1>ikH!O+py3pCc834T$s5nubuQk7Ow4f zuLvdKOe`8fP`j+^2-I?Gzw%Abar;~EP||6moWNKk8#;@<$TNg-I#kI(EZr84c{SSs z{iK8QLReOS6T9#LOSTD3_9MagP(=B=UvTBPw)O^~updf1Nh7ViM<)VWEF=Z{3^yj; zL_LyIOitPK5@XhmW*`aHjqCh#K1vLfyK4&cPPRYwcJ@toH`vR;N`q5n=vLfC_`PhW zK!D72#vKvI0cf#Y{QBXA^$>vW*`N0rC23KSl8eTt|YPf zT9dLDd}S#bGY-uYSGvND{-ac1xPL$Ty%|rqtwHSD*^PCwJ@H@nf3DJw>2Xu;Jwfb; zq&NItZlr#l{b-)hyA5^qK{OHkTm`q8dk;2C4tONj%^T|2hj6tUng~8_n)P*_uXTfx zfL<^MPdU8gxE3X=E?!EV%C|^a(-}sG{hiEugGYOK8Ar@b6x3@9v0DCV@uq)Gc7GVRbmPx}Klbe$SR(d z+xOl^M2QGQKG>d{hWQg&Em>A-)qAeC@mm7dScbi8Y^%9gJ+si6@A4Nyg83!dfFsyp4!Y*X*13urC^tb215wCIp`am(*Y1zUnu{p0Q6F`;uJa2?fmIsHK%v z)Abx4ru;H1L_+7zh`Wvt(ljE$Vlsk|#XeEB++(ccJ~KdcRaB@@ZL5veu~%~AVzopf zK@nCW_o?uZxb!iI+}c!;7H)R2L;hh20uD3 zG3vHG5eS1~iyO2@!*qUh?uz7EEGQTUs^O`Cb=j9_Ag>m0L0j&OmIm+(tz9|>*FmJ%;2fWc>J*D(K3Lv)`12<;kBk|fqC38~sgX>}U{?W+V# zkbI7z3v)cxU$ioe@7f<^&!R-{NKfhalP2$ARn$mRLW~Q+-q#mnll;1 zAJ_ZadigapAMnofgqETjx?8{beH!3B5S##RkTyvlKTA!YhiK>5T;Cj6y4OXsQZymh z1vV@Ad(LFyejx1miU@p=Qo8cx)11`xQr%&?K)R1Zm!5*&9r=QN!>^?wUD^8OHeDgH zGviu&FY5>?JuMz!(fSdH84NAiNyg9u~Z67roy#x5reVU}+%#3#i0Y zF9&#Bj}aJIH~H!p`@R2d+8qJEt$%8R9Uf2H@p>7Fco12>3j#I=Sn<6ijh|QrKYZ$* z1DGU%!g`R&iB2)LM~&&>-X6(9omA!(3MpA92#ctU>SE84=VrMThH)CB_S0XzIsTV3 zi12&9VdX4NJ~Lk1_NqEIjt~8Yw|&#rli|=zZO;EV-8zNoyqQ;onxtZ>v|dX(=#5Py zw-%8yyIjnJ{XyWai~j;($3Aiizms6+|8~76(C^%YcTlBB#BDFj73it7s|ax7>iP>d zNcL;+ghqWDJ48m4OIz{gvU7`k)HvNgDOaSOE8w2MT&eaXijePEYj(2*+nDhsv-$e zP=fxB+0)zLZI~HlI0=idc*cTRW|IL*1@3~0tJfFNmIoZH#ND89z6&sMKY>uy7qjjZ zjdjokg1iI)Zn;QJEq^wVWj7v!e(l4*Re1xHI!<4DQk9he(H$s-PP*rh29Hg)DIeR>z5`?e^uI&d(oyM3-l_!hNIP=5?FpngC8-ECcrQP8_6Mof`<|n_2^|W6%wbnrQ}6xA1S2-@@X}M8y1L*Bt}n|nOLNG%O;>m9xZtw1ML>drICi3< zK%A<#PA5sO-B^OCS4jFu1MSZ?uPO)*Nk+pZGOLj21V>}cT#&sc!XhEZBATc!G_tU@ z0okt~eD1%>#RH~Aklr|0#qt4@Z;jWYrt7jpGrCmRQE#*W|ZGkC$rjGbtl@ z{7vkjXa|EDU=d!8(_w)!HDW&NjNUbwBfNQEvhCZQF^KUVdm87S+D6TwRMi}v?qX)w z)&3~Y__A+mboAm-&#r-WhVe?tsZ*!2LFci@GlYgrG8ITQYUfS3P^~SXSPTNTdjD=& zU`8Z7lEz~Ryw9I8c4qgx49*+ob2MAHY+n3?MJClLW5vQO`@0G+g)n7*rHS3xQCeE8 z*TdL0E5Gy47%xA<=Uu0USWE&>ky1J-4NCdw&_wsF+4twFHuM@a1ZU+}( zeSL!bE=kDFZf+vJZb{On;aXb~)!5csxK~FN*=JtTgEz}Ctxr0yUBr7Tl+wfv!utUz zev`OFG+RV-Ph!kuZDKdU_!ssyL_A_z(ZF6ZF6Cb&r1n|z#|(@cmoFt{@{71TD{=^4 
zU8?6!;lj0RIjc*Ufs(O{7jK=>*`ITE0aNS!WsTN?ES4h5#=si!$~GQm$j?zLsA%cQ*lNrK!_+65qB$8V;YpSzl-RF zSQBoS^&cM)iI>LxAj2y=ON&~_;V52i2aYmpH2&mgu2Lb&4&Y_Cy`CK3T9j*gsaM&! zQ%;u5qJs_^DVHSGlrtyjp<`YWcax^Yb;-S5;uL+~<7HmN)sGB2?mOBXRo^*B8FX%h zg*^`;4iDfUbX_zwNf{n}Q>ZXkP0d0JeGX95Obxq7dHaK)2SbV>j6}Ymt^MW=zeSGu z(g(a#5P0D9H8KU8Ilau2Xwsu+tw+-0N zDHaP>X}uXcyjxbl=41^l(NN|s4mWBkSx*i6$pAY*#J^9T_~k+M@n)%{s?*J#8~<|M zlM6xP9#oIsQU6-C7{bkF`;D70dpT*eC+?dhoC^tNhUIr@k`ee%pQuo$ulk2wo?>or z9ns4m@g4*~cVSN1vd|7P_}ZpS5`Keta~)Io8BwUAJjRi;*v1A_RU-O-@LfQ_zF@MJ z$!;|?$Y+a~?07<(ChoEzEJC@s-W3Ty-8|LFTd+<m-H2LVNPLSB+BUv<6n8;ReS~-0@&Z-Q*o!lY6anP5~8xc z?<|9Qc~u@L0HIGR^FG$P#7;x2SaVqD%-bN4-nFCC>nW`S&yD8Xr-y0#Oq}OCnQsZ> zTLSPE>qFN#)`z%6F_?PjYy4~siIDyY^C6@99f>VNsIl(@0rNnLP&L8_0V?p)m=71{ z#P#@WX0N+l8=RI^sc;I~c1nC+N%VP=E})O{jhdh5#}($V*RqaZo18wPsgRYS=95G2 ziQ+IzCdy<*!skc&0G-uyd-kScUbhM$o6EWfh?OhOy+1-`}54VW7x5;`9 zgH!U*e6vll;aWv7T}zsIV=q+ioPNZF3JVH`{ zHD*n(z7(E89c1KfN5dguXXK=|JNUcj$c~j`o?ES?fDw_SckToJdJHDm+6QRt=_Hm$ zo>C*tM^ZEE^}*;`6$g(bqC&N^VRb~LvdN+*>7(nQhU?0x&OQ@awxFus+S6|v0t$w(1WTANuPXl+6*s40uy9I$ zdLksX*<6!G-QFJbNyh55{g|LVRyx`vmAwIAe(e4g-ejDdk4ll{9IU9LZNHpRtNT3c zgzuMYpM-+s!DW7zMS`y4tf2$7iOiZ27*{J4Y>hMA%aWP=;cH7a{JX@cT_z-v*X?Pq z(sgl5Dd83n)Hu&nsLti)4hq`Y@!0)^XTps}5)vMf?b6h(6XF#i0D05a%lv=_6Ah?n z>T3C>1qCWag{6pGxMp)jI)a|$F=IuIT5PC{?{+)Ly*<1=! zYiWmIYS%(smA=-=D%zG3++#c;6K)D8c`1Le@yJ!k5wem|UDZ;*fiPhQyU$dZE!R0gHHRWM@``$JJ7f5-m*ACy>1=>lmha z)3uHSH8WWm&<=0-UtQQ|F;4uyl@m-M6mfi%9svzp^#D!;(MqZzia|}k7E6_DKkUJ+ zkSb|xjSD8AaIfN>5Shl!-8<5EAsJ6`iS~0s@nq(DB+y#Mg`&X5!~R;xua}qH!&*fK z)L#CfDtBPATpO*IUph9h{n$;5c0~q*=bL`y&XoAX7ny)Fon;X?38L?>5njZgGk4Mg zXA0`3CA5AdX=Vu!dni!ak<4`_19j)=X3)B(?cux9ET)p;=L^WW6pxzPZdcFJK?9+w zxD=i5Gz`o?)0N~h8wmBWxI;^ zO6?_|pd&_dl08$#cQAuYX-Ylz>7Wm-xak;8NTK7*`f~56bS>!}4^On@13;+w-D}Iq zvMjee4WTHt|4S0gmw0IED91NS(z~?0%Bu^DtQR8CQ(pG(DXwTP^^+k_h#g*}Gc7%K z=z*wBj9=GcLTIKge#dDA3h&jTkID)gD?ZCG$CkI9V$9n~6V%o>Q-_sI1{vB-p92>5 zXm0*|J;Ng$RV8Dgf5fD0px)fGfGNMyGs$izJLP4!52tyJ^|j|_rPjbZy}8Y#v{vy< z%t>=h15-Hh>bmvLc$mktiGrg6a@e;vi?^kg{I= zSKK6RFygq1Tv_kC3)cWbhMvA0e634An0Y461`jo|FRvvDn4a@HAZ0Wpj-b?6t7r;GOC{MTt-myBIe zW*OH%?_hIyRcwv(0;;}94FY8|HW;|odc0_W{r_oQc1X_}NYOL$ueY6-4q6f#QU0}= z)#%AWFtaT}Uc01z6nBbm7+N)*|20Tj@o0CeI3vjaTQ`n>pk5_#J)?M)Cu(5LAbhZJ zN8a2>Ood0T!1xlb!6apmZFmUktrpu1RQ zr5=cKr=qJH*J8p0gg-cx^4z?9pEbp-(N|SQ4TsV9Z1tdM2drCqX)8Jj!OPrz*I^^F zl`UE@L+xUK?G%1N>#(`U6fO9Zs ztJUT(3+vwS^VmEjicVQ})eh^D%N(}XflCG6Nmz#ora0CdI52M?`M9cvUTkc(#i$Z< zk?g&JhCxZ+chsqExc0xZRun52y5Xc0b)!%*w;@5YE<~FgNnLGF_G!w(;TjT1o5+`kOEs96*QHquA7@rC(?$@qyw;I_Z8Xsj0>%*1>?Bh}eOoj`ObHvlXtaHe3jCK`?vbpLGYR#D3Pk%*A#f$WV z!@ZiVMt5?d`1259D!gLK%!r8X1Malu#GSX|_%v@v@GRMVHCjpLZSub^FSmCsSBCmgJLQleR**+ea>r$i&a++K-UAAn>E2FB&)@de zq7`|t4CenAL$}&fq>3gSm7_eC$2$2*WCk#Mqi`%?r5tc1gom&ZAJieoa@a}t&+q&eHcNqX5q>|%}5!SQrG)O$d}-2H}WQ`5YV|YxEc~(G58w_ zpFryzzMVeqlR*RsaFAvBB4XNR_(HqH^7lBg(F_Z}q~8vu!DteZ5cAmyJEyeTg)BaO;-hyO9KXzw4k%Nx$ireC~tKQPM4z3<4YrF&KLwg z9G2Yk6&L9YhI*otl~4==p9L_y?u8H1*$v-y-q{MduK>ZXjt))NQ6&86m$!+ zv2qZIMgazsW@HJ(I9pdi807bTv_bIUk-BM)NaEj`zfh)5Ssztp1i1GUWROr|yTjKm zK2kL`Lwy6s5Iw+>>*r&LXZW4YNe-x6vh6~`1V9v7dTig+td1oAcT6GKR772bh7Y*4 z7eBfy@pQuys;lJyaQPr7Pc4gwQB1hge_dv?d@dU(G1-1EDaU;#FS|<}j5Vh{>aZZ1 zLDX~}QlT7(bj29|24E!Qm7a2;p7iOjcdC-8G;=ipJbXwZM_~zQ61BD?whV`EMCY1$ zId^Ucr|qXRBNz4+@GNEz1K@SI-Rc2|R)(QWv-&sPey`P=3vC{E6@xL*+^5or<$Xav zXBsvV=fWCfK1DEPe|~%ysfxlJ;ST6c4XVkVhKYPfvZ79%;m+k7C$AoOs2v%m@eNzG z1G(x7)a>=xo9ciP9es7HCgdZ6>K^VRD_R_yHv6|?_Nu=bkfh;JnT=Y$Ph})s85uDW zQeMH}ZVQoJTEh_4orH5?224%y;|IVVaj{TT1WCrIr4tQR+4NOg*m zqzXL*l8WX{MCC|K%F?R(Uvq^?X~3Qdk3}xi0{6u$adovj^t6EiXGc*LG60nkyzv-k 
zm~R@obX-^<2~D#H#YcLQCDCR^A~v?5p+%WgYnPk8zc2aR>Qh)EXqBgZM{#CAZ;oVb5o)1B zKKO|VIlR6qy4Y&wAOK3|l(;x3_~B`!u4g50Mn7Z__o1ZJ3?R414lDa3l5s=N>#q^p z^0U&zpLmggbw>_7DLptqA&eE1fa-jAQ##B*&sq$RMk!Np^3b!bB~2Nq&SHU11r>jD zY~9>U-#1r(qlDz^_rzv$_u@-nQKv!NnbZq{jjgIPZWH%|?V)aiw_Cg9T9 zHvs2NeV^SgKA9gG7J0yGhOYqryxg>J2nB@jrQa;N48q2i?a}i?Eik7q#S>V>xkc8{ zb41DW5i5JIGEnOwreG^sR7)l1KW%B4MHvQ88AHDlzMa4=@lIf} z9f=#Hz8cEm-TOBALuSv9Ej^jBW_^SzL=TK>>VqS~;^U(nrS-!_;0ym4Xy5In9L-UE zY+mEZKE>gqpz6!Ju4q|$mJrXuG*&88{aQ&h)!7?W3d)mo#|m!k_hlI01Qj~o2RbvI$%YAW$e{7h*Sp=QU7-{A=uL;vWd&OPn&M#Gh%o|Gs}U29JW=+6xr@EPnA#;$58dx2Hrp=8L7Tm)|90qA8Qb1JXNd%Gr=NE zrwxSAYfHMB`MI)}16LAwH|W9%6@ zoyo=w`bn8okKDc1hTAci;q(N-Ybk&n`YN8Ax1ACD^Hnn&d>QlEsgJoJ=6>(pr!Cd|Xm3LUX&vGQACD*ZO z)>(8^fDdX1_d&f1G!z(^P?suLj-yjcipzY7PQ59=e%#iy%Y+FC3HCFS=7QdhzqcWGL zAn$A!>ie?$t9F8of=Uhy;Hx-?MXG1Oim?fv!A1D*vbK$~RpfvVaE<7DZ{rvELcLn z%dIPBvkq7GBepQgN4(=0OR-Vm#PF->yh|2zTurvc+RWLA_no6_fsEyLWF@zOfw4sB zG1O`(3siR#td~ZgY9=}qfg0Nz$B=pj&;|y62Eiwh!c?w`z(A@^j{S2szM`?tsy$x6 zHx{6vfHCxoGgFIIRnK{9K99qcnDSr(2X@kHf`Wj{^z+teA)d6xAT#V+_${vN6Y10; z`#BVP2&d`@8b>U7kZO!sWQg;6SF}+aK{%zvZ^|3VgzmeFG&b`P(2{yed24)Z6MzuW zB5XEcO*dI=2P61nlv^Zt>}YdGo$Jo>bZRN!)w(rHR3bcmMv;{9Py>MdmSuVXA7+}X zsfHKRm?5DjygtpiTdE&aU?rf9n9Ixj}FlQx6Zmdg8grS=D@m9Dl|{ z3z;W^@UDx3{v2eO!x7E-|=2BJRHKfSSSR7k+NcqIg z>y!aIf25VbIKkQBo&K^dzmxQ?m*u-Va0q{~)dwi(g;w{eBo0^#Sh~t$d55qqK1VX0 zJxPN!xrxl5T@atj0^Bu|uR#-nz5i$|7Ev+$i>o7}NyPH((O&kZ-;L+zkxS*5R zC^4&mx|~5`%DPiDO|h7AUGg9jBCaD@DQDTXD;^sMVn1COt%umF+pXOiz1OJ|#@ZL- zsd-4c*Ynt)v-QrHOC9qCJ1Svt-}3!TMFL`8OF-8kS_ZXr!})3E%c40w!ty~QvEn1q zdV6=f=<}U}brmd{h^bAE8_lpxX?sS=VTAgtjn9qKqN5J3>TPnl5TUBK?`YvYet6)q zKWT*pqp_WmoK0JMpFWaLO^Rb2E^m>pf}7@r+4j897;{LvZXv7mT=A5J(^EXBzs zS5Vzs;6eK%*QgX}E4y?bJYmJ5Dqn8^?z=~2&y8bJ=>uTIWHY<*9G5p6LS^BzGP*U! zBoe$we$w9}jwV9UXEQDKfklHtr@75~PGKPp({op+d08>d`2{_vg zdvxXhaGs*h;=G^(2}VouHmh~3b84S_W$%DhUzj_A>t4^jCNz&r7kQ+p?W0BQ1SR;i z0lqs#-xmjn;CzP9W;a}HHVWIGxrWJV`q2Iv1Z&M6k#=TW91nR^zf-?#`IP4QD<&ll z5)x*^H^0^E@uw1Ad`lp9!nmA@pr*{gT(x)Qa!1-@FD?sY?)c z3C)wJx-p0@%fVhXQ>^^BwuNJ}7~I87v~b~B(EIp+#=y^ods`U8_!W`x^>8J3BYskQ zZ)R}u8n8TNUk-3LUhoLqakIj0hh)cKQvW@#gd<_R{p|C)@NE!AJUG_0N@`OwR)u;V zMY0M|#b4%s0aq&W4QHx5lvsMDI*$Nen~Fb95UM`AU2rrhZIm@z1}b-?a`fqj`q!x` zV!qlsK?6NK_Ho*{Au@!z7NXj9^qZ>fEdU?27(IK`hGL$3GY2oD1VW+Ba#v4Q;U0#( z4WA0(Lfw493ga-j?Z5Y>@T3g|x&%&WFW*oR@`)iHUEoKr05!EH%Y`G%$ioF=@YJ;R zR6OkObvPh!b<>*bCb-JeD{13VbHBx8OHt%ng41P$_1b?4p*fk!**QhUlBb=)ZNIuB zZ&R`saJxlqDx=zrdWD+r_rq%B^D#;Z(gw^6?+n&go~XCzxmMqvAR}f@1^dM}h)H+Q zD35t68#~Q6iU<3$s$mho!Np2>vk6YS&4HH4W2X?MFSkd^%6D4p&-2VInDk%1x{?un zV{jIZi=-PK#*X6ws4XbIQ_194-AVplm?DTyApq#TE%T@)tVDtU#89K0`x(?C{idE( zn4AK53P+G~Gjw#OG&5^HMYd;kAX}Y+tyj{2{8Ikh{=&zbqpyuvuD`^f|P_K_%t0 ze5w+{LUAn@5;zJ&f(z381!>|09j@nGpbf7E{I~`(In2_zz zj&y7K1?CWnKP#hKXfF-QI!EzjHM2#-ucf=GLDq?eJ_7)Xm7gA&W@d8M!UYyivj!2J zybEoyM>CJAr%l5*gEHG*M*cNF`D*bs-~Mi-xJCVRgPhI&KMq^5)3PQ3Jql{~Lywxm z9!Yuz85(_{1f$0Ch_@1W)lPtfZriI2_m$HCB zA$V2tz*ml;?d@6}M#c2T6^QR=j#7BP;;w}Bg)a%y=)rWt{kgP3oycrZ?`r5#vwWbL zg@+Nl4N4PsMid2q{35*~KJVF4q(6*0JeYNnyldL!!V8Zi^%2vj{WyH*U7-hXVJK zxu5b(;q?gklB-i{=a?ms>*OnOV^m$iy71-0;nMR+J*>e_znuN>MLjVoTrQ;P;h zIc~3fQ&UK!l-#<7(A=N;-4w@o`hL!sy8(k+<$YJ2; ze?M`|GK(7+3s=Y%ZDx)g27R;n;!VvNz@fnhSr{nOycgGjM3~ha>1V%??hoQTQz!(Ypc#gPf$Xdj=W#m(j?+L!3 z{P^BU9M(EFI#gDsdlXSTlB*6*BsmGNRG^8WXCN0$F5}`E3H~>uu3g2NG3n^q3f0ab z4(KQH<1tQ;(2%fB3c~26qBYJRWjneQx8~B6{)pDGvsMcZU+BHxVa92VN)gj-=rLS| zq9Uo!)%noYfVea|%5n5ll1Blq(eeTHN_!;vx0~80w=+}5&-HkwFk-`vn%rawO<5B+ zubHM}t|Q zMIb``=}Z*wO<66$(-dPeHxD@H*2ip0Cac-b1mH90VGppbqEB_YwowFqJQl22 z2t6Iwy*hppeH5RgSL1s!67;AJ)ShutB2<)_MPWlChrdDQi$-#mW@Xf)kqgn6{CMuO 
[GIT binary patch payload (base85-encoded binary file content) omitted]

+  <bitstream-type>
+    <mimetype>image/jp2</mimetype>
+    <short_description>JPEG2000</short_description>
+    <description>JPEG 2000 Image File Format</description>
+    <support_level>1</support_level>
+    <internal>false</internal>
+    <extension>jp2</extension>
+  </bitstream-type>
+
   <mimetype>audio/x-aiff</mimetype>
   <short_description>AIFF</short_description>

From 71ababf728b723dc28fc5149df39622e0cfa8a20 Mon Sep 17 00:00:00 2001
From: Michael Spalti
Date: Wed, 8 Mar 2023 17:22:26 -0800
Subject: [PATCH 066/686] Updated bitstream format test

---
 .../org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java
index 1a6cc29ca75c..d5798ba5a3f0 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java
@@ -56,7 +56,7 @@ public class BitstreamFormatRestRepositoryIT extends AbstractControllerIntegrati
     @Autowired
     private BitstreamFormatConverter bitstreamFormatConverter;

-    private final int DEFAULT_AMOUNT_FORMATS = 81;
+    private final int DEFAULT_AMOUNT_FORMATS = 82;

     @Test
     public void findAllPaginationTest() throws Exception {

From ac48b2d1fd529d3e5c2e1030ec921d7c965b8ce9 Mon Sep 17 00:00:00 2001
From: Michael Spalti
Date: Mon, 20 Mar 2023 16:03:57 -0700
Subject: [PATCH 067/686] Fixed typo in bean definition

---
 .../data/dspaceFolder/config/spring/api/iiif-processing.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml
index fa203fe568e6..85e49239156c 100644
--- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml
+++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml
@@ -6,6 +6,6 @@
-

From 82a0ccf9ba2380498cda6219b62ebf83cd201b15 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Thu, 23 Mar 2023 15:06:32 +0100
Subject: [PATCH 068/686] [DSC-963] Fixed webjars classpath

---
 .../src/main/java/org/dspace/app/rest/WebApplication.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WebApplication.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WebApplication.java
index b04b6ebc9ee9..c67830e92573 100644
---
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WebApplication.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WebApplication.java @@ -214,7 +214,7 @@ public void addResourceHandlers(ResourceHandlerRegistry registry) { // Make all other Webjars available off the /webjars path registry .addResourceHandler("/webjars/**") - .addResourceLocations("/webjars/"); + .addResourceLocations("/webjars/", "classpath:/META-INF/resources/webjars/"); } @Override From 73a9fac5e907cb9d104c0cb7cc1402fca5543754 Mon Sep 17 00:00:00 2001 From: Michael Spalti Date: Thu, 23 Mar 2023 13:00:20 -0700 Subject: [PATCH 069/686] Modified converter service --- .../app/rest/converter/ConverterService.java | 11 +++++---- .../java/org/dspace/app/rest/utils/Utils.java | 24 +++++++++++++++++++ 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java index 0f7b47239e3f..e83790495146 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java @@ -202,17 +202,18 @@ private Annotation getDefaultFindOnePreAuthorize() { * @throws ClassCastException if the converter's return type is not compatible with the inferred return type. */ public Page toRestPage(List modelObjects, Pageable pageable, Projection projection) { + if (pageable == null) { + pageable = utils.getPageable(pageable); + } + List pageableObjects = utils.getPageObjectList(modelObjects, pageable); List transformedList = new LinkedList<>(); - for (M modelObject : modelObjects) { + for (M modelObject : pageableObjects) { R transformedObject = toRest(modelObject, projection); if (transformedObject != null) { transformedList.add(transformedObject); } } - if (pageable == null) { - pageable = utils.getPageable(pageable); - } - return utils.getPage(transformedList, pageable); + return new PageImpl(transformedList, pageable, modelObjects.size()); } /** diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java index 88278b531c14..c3f748d88e43 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java @@ -165,6 +165,30 @@ public Page getPage(List fullContents, @Nullable Pageable optionalPage } } + /** + * Returns list of objects for the current page. + * @param fullList the complete list of objects + * @param optionalPageable + * @return list of page objects + * @param + */ + public List getPageObjectList(List fullList, @Nullable Pageable optionalPageable) { + Pageable pageable = getPageable(optionalPageable); + int total = fullList.size(); + List pageContent = null; + if (pageable.getOffset() > total) { + throw new PaginationException(total); + } else { + if (pageable.getOffset() + pageable.getPageSize() > total) { + pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()), total); + } else { + pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()), + Math.toIntExact(pageable.getOffset()) + pageable.getPageSize()); + } + return pageContent; + } + } + /** * Convenience method to get a default pageable instance if needed. 
* From 2b8c1ac6041eb65ff401a002b77709d7ded3fd92 Mon Sep 17 00:00:00 2001 From: Michael Spalti Date: Thu, 23 Mar 2023 16:11:50 -0700 Subject: [PATCH 070/686] minor refactoring --- .../java/org/dspace/app/rest/utils/Utils.java | 37 +++++++++++-------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java index c3f748d88e43..2c83238d8c63 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java @@ -98,6 +98,7 @@ import org.springframework.data.domain.Pageable; import org.springframework.hateoas.Link; import org.springframework.security.access.AccessDeniedException; +import org.springframework.security.core.parameters.P; import org.springframework.stereotype.Component; import org.springframework.web.multipart.MultipartFile; @@ -151,16 +152,10 @@ public class Utils { public Page getPage(List fullContents, @Nullable Pageable optionalPageable) { Pageable pageable = getPageable(optionalPageable); int total = fullContents.size(); - List pageContent = null; if (pageable.getOffset() > total) { throw new PaginationException(total); } else { - if (pageable.getOffset() + pageable.getPageSize() > total) { - pageContent = fullContents.subList(Math.toIntExact(pageable.getOffset()), total); - } else { - pageContent = fullContents.subList(Math.toIntExact(pageable.getOffset()), - Math.toIntExact(pageable.getOffset()) + pageable.getPageSize()); - } + List pageContent = getListSlice(fullContents, pageable); return new PageImpl<>(pageContent, pageable, total); } } @@ -175,18 +170,30 @@ public Page getPage(List fullContents, @Nullable Pageable optionalPage public List getPageObjectList(List fullList, @Nullable Pageable optionalPageable) { Pageable pageable = getPageable(optionalPageable); int total = fullList.size(); - List pageContent = null; if (pageable.getOffset() > total) { throw new PaginationException(total); } else { - if (pageable.getOffset() + pageable.getPageSize() > total) { - pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()), total); - } else { - pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()), - Math.toIntExact(pageable.getOffset()) + pageable.getPageSize()); - } - return pageContent; + return getListSlice(fullList, pageable); + } + } + + /** + * Returns the list elements required for the page + * @param fullList the complete list of objects + * @param pageable + * @return list of page objects + * @param + */ + private List getListSlice(List fullList, Pageable pageable) { + int total = fullList.size(); + List pageContent = null; + if (pageable.getOffset() + pageable.getPageSize() > total) { + pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()), total); + } else { + pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()), + Math.toIntExact(pageable.getOffset()) + pageable.getPageSize()); } + return pageContent; } /** From d3e70c4a0779d191b59a7dd19f3f801092322da2 Mon Sep 17 00:00:00 2001 From: nwoodward Date: Fri, 24 Mar 2023 08:34:45 -0500 Subject: [PATCH 071/686] added exception handler for when maximum upload size is exceeded --- .../exception/DSpaceApiExceptionControllerAdvice.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java index 1cbfd5c63256..5e32247ee4ad 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java @@ -42,6 +42,7 @@ import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.context.request.WebRequest; +import org.springframework.web.multipart.MaxUploadSizeExceededException; import org.springframework.web.multipart.MultipartException; import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler; @@ -97,6 +98,13 @@ protected void handleWrongRequestException(HttpServletRequest request, HttpServl sendErrorResponse(request, response, ex, "Request is invalid or incorrect", HttpServletResponse.SC_BAD_REQUEST); } + @ExceptionHandler(MaxUploadSizeExceededException.class) + protected void handleMaxUploadSizeExceededException(HttpServletRequest request, HttpServletResponse response, + Exception ex) throws IOException { + sendErrorResponse(request, response, ex, "Request entity is too large", + HttpServletResponse.SC_REQUEST_ENTITY_TOO_LARGE); + } + @ExceptionHandler(SQLException.class) protected void handleSQLException(HttpServletRequest request, HttpServletResponse response, Exception ex) throws IOException { From 882373faa133fb5bd104f033a9082b35f9419498 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Fri, 24 Mar 2023 17:14:06 +0100 Subject: [PATCH 072/686] [DURACOM-125][#8736] Failing Test ItemServiceTest bug: - MetadataValue list once modified should respect a similar order of the `@OrderBy` annotation inside the `DSpaceObject#metadata` field. 
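
A minimal, standalone sketch (not part of this patch) of the ordering the new test asserts. The class and the Value record below are hypothetical stand-ins for DSpace's MetadataValue, keeping only the fields the ordering uses; it assumes the default order is metadata field id first, then place with nulls first, which is the comparator the following patch introduces:

    import java.util.Comparator;
    import java.util.List;
    import java.util.stream.Collectors;

    public class MetadataOrderSketch {

        // Hypothetical stand-in for MetadataValue with only the fields used for ordering.
        record Value(int metadataFieldId, Integer place, String text) { }

        // Orders by metadata field id, then by place (nulls first), mirroring the
        // @OrderBy-driven order of DSpaceObject#metadata.
        static final Comparator<Value> DEFAULT_ORDER =
                Comparator.comparingInt(Value::metadataFieldId)
                          .thenComparing(Value::place, Comparator.nullsFirst(Comparator.naturalOrder()));

        public static void main(String[] args) {
            // The same author values the test adds out of order, with their assigned places.
            List<Value> modified = List.of(
                    new Value(1, 2, "test, one"),
                    new Value(1, 0, "test, two"),
                    new Value(1, 1, "test, three"));

            List<Value> sorted = modified.stream().sorted(DEFAULT_ORDER).collect(Collectors.toList());

            // Prints "test, two", "test, three", "test, one": the place order the test expects.
            sorted.forEach(v -> System.out.println(v.text()));
        }
    }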
--- .../content/service/ItemServiceTest.java | 172 ++++++++++++++++++ 1 file changed, 172 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java index 579feaac3175..50b4d3f3b48e 100644 --- a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java @@ -9,6 +9,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; @@ -112,6 +113,177 @@ public void setUp() throws Exception { } } + @Test + public void preserveMetadataOrder() throws Exception { + context.turnOffAuthorisationSystem(); + itemService + .addMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, one", null, 0, 2 + ); + MetadataValue placeZero = + itemService + .addMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, two", null, 0, 0 + ); + itemService + .addMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, three", null, 0, 1 + ); + + context.commit(); + context.restoreAuthSystemState(); + + // check the correct order using default method `getMetadata` + List defaultMetadata = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + + assertThat(defaultMetadata,hasSize(3)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, two", null, 0, defaultMetadata.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, defaultMetadata.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, defaultMetadata.get(2) + ); + + // check the correct order using the method `getMetadata` without virtual fields + List nonVirtualMetadatas = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY, false); + + // if we don't reload the item the place order is not applied correctly + // item = context.reloadEntity(item); + + assertThat(nonVirtualMetadatas,hasSize(3)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, two", null, 0, nonVirtualMetadatas.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, nonVirtualMetadatas.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, nonVirtualMetadatas.get(2) + ); + + context.turnOffAuthorisationSystem(); + + item = context.reloadEntity(item); + + // now just add one metadata to be the last + this.itemService.addMetadata( + context, item, dcSchema, contributorElement, authorQualifier, Item.ANY, "test, latest", null, 0 + ); + // now just remove first metadata + this.itemService.removeMetadataValues(context, item, List.of(placeZero)); + // now just add one metadata to place 0 + this.itemService.addAndShiftRightMetadata( + context, item, dcSchema, contributorElement, authorQualifier, Item.ANY, "test, new", null, 0, 0 + ); + + // check the metadata using method `getMetadata` + defaultMetadata = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + + // check correct places + 
assertThat(defaultMetadata,hasSize(4)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, new", null, 0, defaultMetadata.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, defaultMetadata.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, defaultMetadata.get(2) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, latest", null, 3, defaultMetadata.get(3) + ); + + // check metadata using nonVirtualMethod + nonVirtualMetadatas = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY, false); + + // check correct places + assertThat(nonVirtualMetadatas,hasSize(4)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, new", null, 0, nonVirtualMetadatas.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, nonVirtualMetadatas.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, nonVirtualMetadatas.get(2) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, latest", null, 3, nonVirtualMetadatas.get(3) + ); + + // check both lists + assertThat(defaultMetadata.size(), equalTo(nonVirtualMetadatas.size())); + assertThat(defaultMetadata.get(0), equalTo(nonVirtualMetadatas.get(0))); + assertThat(defaultMetadata.get(1), equalTo(nonVirtualMetadatas.get(1))); + assertThat(defaultMetadata.get(2), equalTo(nonVirtualMetadatas.get(2))); + assertThat(defaultMetadata.get(3), equalTo(nonVirtualMetadatas.get(3))); + + context.commit(); + context.restoreAuthSystemState(); + + item = context.reloadEntity(item); + + // check after commit + defaultMetadata = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + + // check correct places + assertThat(defaultMetadata,hasSize(4)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, new", null, 0, defaultMetadata.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, defaultMetadata.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, defaultMetadata.get(2) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, latest", null, 3, defaultMetadata.get(3) + ); + + // check metadata using nonVirtualMethod + nonVirtualMetadatas = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY, false); + + // check correct places + assertThat(nonVirtualMetadatas,hasSize(4)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, new", null, 0, nonVirtualMetadatas.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, nonVirtualMetadatas.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, nonVirtualMetadatas.get(2) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, latest", null, 3, nonVirtualMetadatas.get(3) + ); + + // check both lists + assertThat(defaultMetadata.size(), equalTo(nonVirtualMetadatas.size())); + assertThat(defaultMetadata.get(0), equalTo(nonVirtualMetadatas.get(0))); + assertThat(defaultMetadata.get(1), equalTo(nonVirtualMetadatas.get(1))); + assertThat(defaultMetadata.get(2), 
equalTo(nonVirtualMetadatas.get(2))); + assertThat(defaultMetadata.get(3), equalTo(nonVirtualMetadatas.get(3))); + + } + @Test public void InsertAndMoveMetadataShiftPlaceTest() throws Exception { context.turnOffAuthorisationSystem(); From b1c1edc7f0fc12ee0b2bdeedf463b50ad3e7d2c6 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Fri, 24 Mar 2023 17:34:02 +0100 Subject: [PATCH 073/686] [DURACOM-125][#8736] Preserved order of modified MetadataValues feat: - Introduced new Class with static methods usable to order `MetadataValue` lists; - Introduced ITs also for `PatchMetadata` replace operations; - Introduced new method `DspaceObject#getMetadataFieldId`. ref: - Replaced old sort method inside `ItemServiceImpl` with the new one --- .../java/org/dspace/content/DSpaceObject.java | 10 +- .../content/DSpaceObjectServiceImpl.java | 32 ++-- .../org/dspace/content/ItemServiceImpl.java | 37 +---- .../org/dspace/content/MetadataValue.java | 9 + .../content/MetadataValueComparators.java | 51 ++++++ .../org/dspace/app/rest/PatchMetadataIT.java | 157 ++++++++++++------ 6 files changed, 203 insertions(+), 93 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/content/MetadataValueComparators.java diff --git a/dspace-api/src/main/java/org/dspace/content/DSpaceObject.java b/dspace-api/src/main/java/org/dspace/content/DSpaceObject.java index 1ac88241f4a4..59217a109f66 100644 --- a/dspace-api/src/main/java/org/dspace/content/DSpaceObject.java +++ b/dspace-api/src/main/java/org/dspace/content/DSpaceObject.java @@ -48,6 +48,12 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity metadata = new ArrayList<>(); @@ -116,7 +122,7 @@ protected void addDetails(String d) { * @return summary of event details, or null if there are none. */ public String getDetails() { - return (eventDetails == null ? null : eventDetails.toString()); + return eventDetails == null ? null : eventDetails.toString(); } /** @@ -145,7 +151,7 @@ public UUID getID() { * one */ public String getHandle() { - return (CollectionUtils.isNotEmpty(handles) ? handles.get(0).getHandle() : null); + return CollectionUtils.isNotEmpty(handles) ? handles.get(0).getHandle() : null; } void setHandle(List handle) { diff --git a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java index 24778824bfb8..2119959073f0 100644 --- a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java @@ -126,6 +126,11 @@ public List getMetadata(T dso, String schema, String element, Str } } + // Sort the metadataValues if they have been modified, + // is used to preserve the default order. + if (dso.isMetadataModified()) { + values.sort(MetadataValueComparators.defaultComparator); + } // Create an array of matching values return values; } @@ -542,7 +547,7 @@ protected String[] getElements(String fieldName) { int add = 4 - tokens.length; if (add > 0) { - tokens = (String[]) ArrayUtils.addAll(tokens, new String[add]); + tokens = ArrayUtils.addAll(tokens, new String[add]); } return tokens; @@ -603,21 +608,18 @@ public void update(Context context, T dso) throws SQLException, AuthorizeExcepti //If two places are the same then the MetadataValue instance will be placed before the //RelationshipMetadataValue instance. //This is done to ensure that the order is correct. 
- metadataValues.sort(new Comparator() { - @Override - public int compare(MetadataValue o1, MetadataValue o2) { - int compare = o1.getPlace() - o2.getPlace(); - if (compare == 0) { - if (o1 instanceof RelationshipMetadataValue && o2 instanceof RelationshipMetadataValue) { - return compare; - } else if (o1 instanceof RelationshipMetadataValue) { - return 1; - } else if (o2 instanceof RelationshipMetadataValue) { - return -1; - } + metadataValues.sort((o1, o2) -> { + int compare = o1.getPlace() - o2.getPlace(); + if (compare == 0) { + if (o1 instanceof RelationshipMetadataValue && o2 instanceof RelationshipMetadataValue) { + return compare; + } else if (o1 instanceof RelationshipMetadataValue) { + return 1; + } else if (o2 instanceof RelationshipMetadataValue) { + return -1; } - return compare; } + return compare; }); for (MetadataValue metadataValue : metadataValues) { //Retrieve & store the place for each metadata value @@ -634,7 +636,7 @@ public int compare(MetadataValue o1, MetadataValue o2) { String authority = metadataValue.getAuthority(); String relationshipId = StringUtils.split(authority, "::")[1]; Relationship relationship = relationshipService.find(context, Integer.parseInt(relationshipId)); - if (relationship.getLeftItem().equals((Item) dso)) { + if (relationship.getLeftItem().equals(dso)) { relationship.setLeftPlace(mvPlace); } else { relationship.setRightPlace(mvPlace); diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index de5826fa015b..a290cb0d995f 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -12,7 +12,6 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Comparator; import java.util.Date; import java.util.Iterator; import java.util.LinkedList; @@ -288,9 +287,10 @@ public Iterator findAllUnfiltered(Context context) throws SQLException { return itemDAO.findAll(context, true, true); } + @Override public Iterator findAllRegularItems(Context context) throws SQLException { return itemDAO.findAllRegularItems(context); - }; + } @Override public Iterator findBySubmitter(Context context, EPerson eperson) throws SQLException { @@ -1054,7 +1054,7 @@ public List getCollectionsNotLinked(Context context, Item item) thro List linkedCollections = item.getCollections(); List notLinkedCollections = new ArrayList<>(allCollections.size() - linkedCollections.size()); - if ((allCollections.size() - linkedCollections.size()) == 0) { + if (allCollections.size() - linkedCollections.size() == 0) { return notLinkedCollections; } for (Collection collection : allCollections) { @@ -1149,6 +1149,7 @@ public int countItemsWithEdit(Context context) throws SQLException, SearchServic * @return true if the item is an inprogress submission, i.e. a WorkspaceItem or WorkflowItem * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ + @Override public boolean isInProgressSubmission(Context context, Item item) throws SQLException { return workspaceItemService.findByItem(context, item) != null || workflowItemService.findByItem(context, item) != null; @@ -1179,8 +1180,8 @@ protected void addDefaultPoliciesNotInPlace(Context context, DSpaceObject dso, if (!authorizeService .isAnIdenticalPolicyAlreadyInPlace(context, dso, defaultPolicy.getGroup(), Constants.READ, defaultPolicy.getID()) && - ((!appendMode && this.isNotAlreadyACustomRPOfThisTypeOnDSO(context, dso)) || - (appendMode && this.shouldBeAppended(context, dso, defaultPolicy)))) { + (!appendMode && this.isNotAlreadyACustomRPOfThisTypeOnDSO(context, dso) || + appendMode && this.shouldBeAppended(context, dso, defaultPolicy))) { ResourcePolicy newPolicy = resourcePolicyService.clone(context, defaultPolicy); newPolicy.setdSpaceObject(dso); newPolicy.setAction(Constants.READ); @@ -1222,7 +1223,7 @@ private boolean isNotAlreadyACustomRPOfThisTypeOnDSO(Context context, DSpaceObje * Check if the provided default policy should be appended or not to the final * item. If an item has at least one custom READ policy any anonymous READ * policy with empty start/end date should be skipped - * + * * @param context DSpace context * @param dso DSpace object to check for custom read RP * @param defaultPolicy The policy to check @@ -1611,7 +1612,7 @@ public List getMetadata(Item item, String schema, String element, fullMetadataValueList.addAll(relationshipMetadataService.getRelationshipMetadata(item, true)); fullMetadataValueList.addAll(dbMetadataValues); - item.setCachedMetadata(sortMetadataValueList(fullMetadataValueList)); + item.setCachedMetadata(MetadataValueComparators.sort(fullMetadataValueList)); } log.debug("Called getMetadata for " + item.getID() + " based on cache"); @@ -1653,28 +1654,6 @@ protected void moveSingleMetadataValue(Context context, Item dso, int place, Met } } - /** - * This method will sort the List of MetadataValue objects based on the MetadataSchema, MetadataField Element, - * MetadataField Qualifier and MetadataField Place in that order. 
- * @param listToReturn The list to be sorted - * @return The list sorted on those criteria - */ - private List sortMetadataValueList(List listToReturn) { - Comparator comparator = Comparator.comparing( - metadataValue -> metadataValue.getMetadataField().getMetadataSchema().getName(), - Comparator.nullsFirst(Comparator.naturalOrder())); - comparator = comparator.thenComparing(metadataValue -> metadataValue.getMetadataField().getElement(), - Comparator.nullsFirst(Comparator.naturalOrder())); - comparator = comparator.thenComparing(metadataValue -> metadataValue.getMetadataField().getQualifier(), - Comparator.nullsFirst(Comparator.naturalOrder())); - comparator = comparator.thenComparing(metadataValue -> metadataValue.getPlace(), - Comparator.nullsFirst(Comparator.naturalOrder())); - - Stream metadataValueStream = listToReturn.stream().sorted(comparator); - listToReturn = metadataValueStream.collect(Collectors.toList()); - return listToReturn; - } - @Override public MetadataValue addMetadata(Context context, Item dso, String schema, String element, String qualifier, String lang, String value, String authority, int confidence, int place) throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java index 9ff3cb9ec2af..c3deaacd804c 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java @@ -19,6 +19,7 @@ import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; +import javax.persistence.Transient; import org.dspace.core.Context; import org.dspace.core.ReloadableEntity; @@ -171,6 +172,14 @@ public void setMetadataField(MetadataField metadataField) { this.metadataField = metadataField; } + /** + * @return {@code MetadataField#getID()} + */ + @Transient + protected Integer getMetadataFieldId() { + return getMetadataField().getID(); + } + /** * Get the metadata value. * diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValueComparators.java b/dspace-api/src/main/java/org/dspace/content/MetadataValueComparators.java new file mode 100644 index 000000000000..306258f36a64 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValueComparators.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collectors; + +/** + * This class contains only static members that can be used + * to sort list of {@link MetadataValue} + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public final class MetadataValueComparators { + + private MetadataValueComparators() {} + + /** + * This is the default comparator that mimics the ordering + * applied by the standard {@code @OrderBy} annotation inside + * {@link DSpaceObject#getMetadata()} + */ + public static final Comparator defaultComparator = + Comparator.comparing(MetadataValue::getMetadataFieldId) + .thenComparing( + MetadataValue::getPlace, + Comparator.nullsFirst(Comparator.naturalOrder()) + ); + + /** + * This method creates a new {@code List} ordered by the + * {@code MetadataComparators#defaultComparator}. 
+ * + * @param metadataValues + * @return {@code List} ordered copy list using stream. + */ + public static final List sort(List metadataValues) { + return metadataValues + .stream() + .sorted(MetadataValueComparators.defaultComparator) + .collect(Collectors.toList()); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchMetadataIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchMetadataIT.java index b78436f1fb38..58781cf589be 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchMetadataIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchMetadataIT.java @@ -11,8 +11,8 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; @@ -23,8 +23,11 @@ import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Comparator; import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import java.util.stream.IntStream; import org.dspace.app.rest.matcher.MetadataMatcher; @@ -50,6 +53,7 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.services.ConfigurationService; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.After; @@ -63,6 +67,13 @@ */ public class PatchMetadataIT extends AbstractEntityIntegrationTest { + private static final String SECTIONS_TRADITIONALPAGEONE_DC_CONTRIBUTOR_AUTHOR = + "/sections/traditionalpageone/dc.contributor.author/%1$s"; + + private static final String getPath(Object element) { + return String.format(SECTIONS_TRADITIONALPAGEONE_DC_CONTRIBUTOR_AUTHOR, element); + } + @Autowired private RelationshipTypeService relationshipTypeService; @@ -75,6 +86,9 @@ public class PatchMetadataIT extends AbstractEntityIntegrationTest { @Autowired private WorkspaceItemService workspaceItemService; + @Autowired + private ConfigurationService configurationService; + private Collection collection; private Collection collection2; private WorkspaceItem publicationWorkspaceItem; @@ -297,8 +311,6 @@ private void initPlainTextPublicationWorkspace() throws Exception { .withEntityType("Publication") .build(); - String adminToken = getAuthToken(admin.getEmail(), password); - // Make sure we grab the latest instance of the Item from the database before adding a regular author WorkspaceItem publication = workspaceItemService.find(context, publicationWorkspaceItem.getID()); itemService.addMetadata(context, publication.getItem(), @@ -920,6 +932,41 @@ public void replaceTraditionalPageOnePlainTextAuthorThreeTest() throws Exception replaceTraditionalPageOneAuthorTest(3, expectedOrder); } + @Test + public void replaceMultipleTraditionalPageOnePlainTextAuthorTest() throws Exception { + final boolean virtualMetadataEnabled = + configurationService.getBooleanProperty("item.enable-virtual-metadata", false); + + 
configurationService.setProperty("item.enable-virtual-metadata", false); + try { + initPlainTextPublicationWorkspace(); + + Map replacedAuthors = + Map.of( + 0, authorsOriginalOrder.get(4), + 1, authorsOriginalOrder.get(1), + 2, authorsOriginalOrder.get(2), + 3, authorsOriginalOrder.get(3), + 4, authorsOriginalOrder.get(0) + ); + + List expectedOrder = + List.of( + authorsOriginalOrder.get(4), + authorsOriginalOrder.get(1), + authorsOriginalOrder.get(2), + authorsOriginalOrder.get(3), + authorsOriginalOrder.get(0) + ); + + replaceTraditionalPageMultipleAuthorsTest(replacedAuthors, expectedOrder); + } catch (Exception e) { + throw e; + } finally { + configurationService.setProperty("item.enable-virtual-metadata", virtualMetadataEnabled); + } + } + /** * This test will add an author (dc.contributor.author) within a workspace publication's "traditionalpageone" @@ -1393,24 +1440,7 @@ private void moveTraditionalPageOneAuthorTest(int from, int path, List e ops.add(moveOperation); String patchBody = getPatchContent(ops); - String token = getAuthToken(admin.getEmail(), password); - - getClient(token).perform(patch("/api/submission/workspaceitems/" + publicationWorkspaceItem.getID()) - .content(patchBody) - .contentType(javax.ws.rs.core.MediaType.APPLICATION_JSON_PATCH_JSON)) - .andExpect(status().isOk()); - - String authorField = "dc.contributor.author"; - getClient(token).perform(get("/api/submission/workspaceitems/" + publicationWorkspaceItem.getID())) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$.sections.traditionalpageone", Matchers.allOf( - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(0), 0)), - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(1), 1)), - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(2), 2)), - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(3), 3)), - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(4), 4)) - ))); + assertReplacementOrder(expectedOrder, patchBody); } /** @@ -1450,33 +1480,66 @@ private void moveMetadataAuthorTest(List moves, List expected * @param expectedOrder A list of author names sorted in the expected order */ private void replaceTraditionalPageOneAuthorTest(int path, List expectedOrder) throws Exception { - List ops = new ArrayList(); - MetadataValueRest value = new MetadataValueRest(); - value.setValue(replacedAuthor); + String patchBody = + getPatchContent( + List.of( + this.mapToReplaceOperation(path, replacedAuthor) + ) + ); + + assertReplacementOrder(expectedOrder, patchBody); + } + + private void replaceTraditionalPageMultipleAuthorsTest( + Map values, List expectedOrder + ) throws Exception { + List ops = + values + .entrySet() + .stream() + .sorted(Comparator.comparing(Map.Entry::getKey)) + .map(entry -> mapToReplaceOperation(entry.getKey(), entry.getValue())) + .collect(Collectors.toList()); - ReplaceOperation replaceOperation = new ReplaceOperation("/sections/traditionalpageone/dc.contributor.author/" - + path, value); - ops.add(replaceOperation); String patchBody = getPatchContent(ops); + assertReplacementOrder(expectedOrder, patchBody); + } + + private ReplaceOperation mapToReplaceOperation(int path, String author) { + return new ReplaceOperation(getPath(path), new MetadataValueRest(author)); + } + + private void assertReplacementOrder(List expectedOrder, String patchBody) throws Exception, SQLException { String token = getAuthToken(admin.getEmail(), 
password); - getClient(token).perform(patch("/api/submission/workspaceitems/" + publicationWorkspaceItem.getID()) - .content(patchBody) - .contentType(javax.ws.rs.core.MediaType.APPLICATION_JSON_PATCH_JSON)) - .andExpect(status().isOk()); + getClient(token) + .perform( + patch("/api/submission/workspaceitems/" + publicationWorkspaceItem.getID()) + .content(patchBody) + .contentType(javax.ws.rs.core.MediaType.APPLICATION_JSON_PATCH_JSON) + ) + .andExpect(status().isOk()); String authorField = "dc.contributor.author"; - getClient(token).perform(get("/api/submission/workspaceitems/" + publicationWorkspaceItem.getID())) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$.sections.traditionalpageone", Matchers.allOf( - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(0), 0)), - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(1), 1)), - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(2), 2)), - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(3), 3)), - Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(4), 4)) - ))); + getClient(token) + .perform(get("/api/submission/workspaceitems/" + publicationWorkspaceItem.getID())) + .andExpect(status().isOk()) + .andExpect( + content().contentType(contentType) + ) + .andExpect( + jsonPath( + "$.sections.traditionalpageone", + Matchers.allOf( + Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(0), 0)), + Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(1), 1)), + Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(2), 2)), + Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(3), 3)), + Matchers.is(MetadataMatcher.matchMetadata(authorField, expectedOrder.get(4), 4)) + ) + ) + ); } /** @@ -1490,8 +1553,7 @@ private void addTraditionalPageOneAuthorTest(String path, List expectedO List ops = new ArrayList(); MetadataValueRest value = new MetadataValueRest(); value.setValue(addedAuthor); - AddOperation addOperation = new AddOperation("/sections/traditionalpageone/dc.contributor.author/" + path, - value); + AddOperation addOperation = new AddOperation(getPath(path), value); ops.add(addOperation); String patchBody = getPatchContent(ops); @@ -1525,8 +1587,7 @@ private void addTraditionalPageOneAuthorTest(String path, List expectedO */ private void removeTraditionalPageOneAuthorTest(int path, List expectedOrder) throws Exception { List ops = new ArrayList(); - RemoveOperation removeOperation = new RemoveOperation("/sections/traditionalpageone/dc.contributor.author/" - + path); + RemoveOperation removeOperation = new RemoveOperation(getPath(path)); ops.add(removeOperation); String patchBody = getPatchContent(ops); @@ -1600,8 +1661,10 @@ private void patchAddEntireArray(List metadataValues) throws Exce * @param path The "path" index to use for the Move operation */ private MoveOperation getTraditionalPageOneMoveAuthorOperation(int from, int path) { - return new MoveOperation("/sections/traditionalpageone/dc.contributor.author/" + path, - "/sections/traditionalpageone/dc.contributor.author/" + from); + return new MoveOperation( + getPath(path), + getPath(from) + ); } /** From 3d0b63e15b473d0140cef5381814ed8db36d022b Mon Sep 17 00:00:00 2001 From: Michael Spalti Date: Fri, 24 Mar 2023 11:28:30 -0700 Subject: [PATCH 074/686] Remove unused import --- .../src/main/java/org/dspace/app/rest/utils/Utils.java | 1 - 1 
file changed, 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java index 2c83238d8c63..ed6e26ed0fb7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java @@ -98,7 +98,6 @@ import org.springframework.data.domain.Pageable; import org.springframework.hateoas.Link; import org.springframework.security.access.AccessDeniedException; -import org.springframework.security.core.parameters.P; import org.springframework.stereotype.Component; import org.springframework.web.multipart.MultipartFile; From 5a327717912f4d6a98a7b7b4d656302a5bb1d328 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Mon, 27 Mar 2023 13:25:01 +0200 Subject: [PATCH 075/686] [DURACOM-92] Fixed CheckerCommand if no bitstream was found --- .../main/java/org/dspace/checker/CheckerCommand.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java index 87b0de4a6571..a12ac3b98a2e 100644 --- a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java +++ b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java @@ -255,10 +255,16 @@ protected void processBitstream(MostRecentChecksum info) throws SQLException { if (checksumMap.containsKey("checksum_algorithm")) { info.setChecksumAlgorithm(checksumMap.get("checksum_algorithm").toString()); } + + // compare new checksum to previous checksum + info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum())); + + } else { + info.setCurrentChecksum(""); + info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND)); + info.setToBeProcessed(false); } - // compare new checksum to previous checksum - info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum())); } catch (IOException e) { // bitstream located, but file missing from asset store info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND)); From 56aca5c14f830ebce89f44737fb2e91b55206ed0 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Mon, 27 Mar 2023 16:44:07 +0200 Subject: [PATCH 076/686] 100302: Fix issue with Pubmed too many requests --- .../service/PubmedImportMetadataSourceServiceImpl.java | 2 +- .../service/components/AbstractRemoteMetadataSource.java | 5 ++++- .../resources/spring/spring-dspace-addon-import-services.xml | 4 +++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java index b30ea22ca4e4..f3258c79481f 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java @@ -501,4 +501,4 @@ public void setUrlSearch(String urlSearch) { this.urlSearch = urlSearch; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java 
b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java index 38632a1a2b72..29801433e3b3 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java @@ -183,6 +183,7 @@ protected T retry(Callable callable) throws MetadataSourceException { log.warn("Error in trying operation " + operationId + " " + retry + " " + warning + ", retrying !", e); } finally { + this.lastRequest = System.currentTimeMillis(); lock.unlock(); } @@ -262,5 +263,7 @@ protected void throwSourceExceptionHook() { */ public abstract void init() throws Exception; - + public void setInterRequestTime(final long interRequestTime) { + this.interRequestTime = interRequestTime; + } } diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml index d5ba4bd462c4..b77ddec8fb9a 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml @@ -49,6 +49,8 @@ + + xml @@ -191,4 +193,4 @@ - \ No newline at end of file + From 6a29d014967e3c44d7f2ecff29d7049e978e082f Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Mon, 27 Mar 2023 17:23:29 +0200 Subject: [PATCH 077/686] 99466: Fixed pubmed date not being saved like the dateFormatsToAttempt specified in pubmed-integration.xml --- .../contributor/PubmedDateMetadatumContributor.java | 8 ++++---- dspace/config/spring/api/pubmed-integration.xml | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java index ba2316755300..6536026058ec 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java @@ -121,12 +121,14 @@ public Collection contributeMetadata(T t) { int j = 0; // Use the first dcDate that has been formatted (Config should go from most specific to most lenient) - while (j < dateFormatsToAttempt.size() && dcDate == null) { + while (j < dateFormatsToAttempt.size()) { String dateFormat = dateFormatsToAttempt.get(j); try { SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); Date date = formatter.parse(dateString); dcDate = new DCDate(date); + values.add(metadataFieldMapping.toDCValue(field, formatter.format(date))); + break; } catch (ParseException e) { // Multiple dateformats can be configured, we don't want to print the entire stacktrace every // time one of those formats fails. 
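The change above makes the fallback loop stop at the first date pattern that parses successfully and store the value re-formatted with that same pattern, instead of storing dcDate.toString(). A minimal, self-contained sketch of that parse-then-reformat fallback (the input string is illustrative, the pattern list mirrors the dateFormatsToAttempt configuration shown further down, and DCDate plus the metadata field mapping are omitted, so this is only an approximation of the DSpace code):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.List;
    import java.util.Locale;

    public class DateFormatFallbackSketch {
        public static void main(String[] args) {
            // Ordered from most specific to most lenient, mirroring dateFormatsToAttempt
            List<String> formatsToAttempt = List.of("yyyy-MMM-dd", "yyyy-MMM", "yyyy-MM-dd", "yyyy");
            String dateString = "2023-Mar"; // illustrative PubMed-style value

            for (String pattern : formatsToAttempt) {
                SimpleDateFormat formatter = new SimpleDateFormat(pattern, Locale.ENGLISH);
                try {
                    Date parsed = formatter.parse(dateString);
                    // Store the value re-serialized with the pattern that matched,
                    // instead of a canonical toString() of the parsed date.
                    System.out.println("stored as: " + formatter.format(parsed));
                    break; // first successful pattern wins
                } catch (ParseException e) {
                    // this pattern did not match; fall through to the next, more lenient one
                }
            }
        }
    }
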
@@ -136,9 +138,7 @@ public Collection contributeMetadata(T t) { } j++; } - if (dcDate != null) { - values.add(metadataFieldMapping.toDCValue(field, dcDate.toString())); - } else { + if (dcDate == null) { log.info( "Failed parsing " + dateString + ", check " + "the configured dataformats in config/spring/api/pubmed-integration.xml"); diff --git a/dspace/config/spring/api/pubmed-integration.xml b/dspace/config/spring/api/pubmed-integration.xml index 83f218c08b05..adec4456ea03 100644 --- a/dspace/config/spring/api/pubmed-integration.xml +++ b/dspace/config/spring/api/pubmed-integration.xml @@ -38,6 +38,7 @@ yyyy-MMM-dd yyyy-MMM yyyy-MM-dd + yyyy @@ -160,4 +161,4 @@ - \ No newline at end of file + From 692c5b5a996f0376b4007df7ba70b7e8115bae10 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 27 Mar 2023 17:57:40 +0200 Subject: [PATCH 078/686] [DURACOM-125][#8736] Check replace operation correctness feat: - Introduced ITs inside RestRepositories to check correctness of multi replacement with indexes. --- .../app/rest/BitstreamRestRepositoryIT.java | 91 +++++++++++++++++++ .../app/rest/BundleRestRepositoryIT.java | 77 ++++++++++++++++ .../app/rest/CollectionRestRepositoryIT.java | 82 ++++++++++++++++- .../app/rest/CommunityRestRepositoryIT.java | 77 ++++++++++++++++ .../app/rest/EPersonRestRepositoryIT.java | 79 +++++++++++++++- .../app/rest/GroupRestRepositoryIT.java | 65 +++++++++++++ .../dspace/app/rest/SiteRestRepositoryIT.java | 81 +++++++++++++++++ 7 files changed, 546 insertions(+), 6 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index f9c1e469fcfe..d6947d756706 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -15,6 +15,7 @@ import static org.hamcrest.Matchers.not; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -30,6 +31,9 @@ import org.dspace.app.rest.matcher.BitstreamMatcher; import org.dspace.app.rest.matcher.BundleMatcher; import org.dspace.app.rest.matcher.HalMatcher; +import org.dspace.app.rest.matcher.MetadataMatcher; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; import org.dspace.authorize.service.ResourcePolicyService; @@ -45,6 +49,7 @@ import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.ItemService; @@ -1222,6 +1227,92 @@ private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Ex + parentCommunity.getLogo().getID(), expectedStatus); } + @Test + public void 
patchReplaceMultipleDescriptionBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + + List bitstreamDescriptions = List.of( + "FIRST", + "SECOND", + "THIRD" + ); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = + CommunityBuilder.createSubCommunity(context, parentCommunity).withName("Sub Community").build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + Item publicItem1 = ItemBuilder.createItem(context, col1).withTitle("Test").build(); + + String bitstreamContent = "ThisIsSomeDummyText"; + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + + this.bitstreamService + .addMetadata( + context, bitstream, + MetadataSchemaEnum.DC.getName(), "description", null, + Item.ANY, bitstreamDescriptions + ); + + context.restoreAuthSystemState(); + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", bitstreamDescriptions.get(0), 0), + MetadataMatcher.matchMetadata("dc.description", bitstreamDescriptions.get(1), 1), + MetadataMatcher.matchMetadata("dc.description", bitstreamDescriptions.get(2), 2) + ) + ) + ); + + List ops = List.of( + new ReplaceOperation("/metadata/dc.description/0", bitstreamDescriptions.get(2)), + new ReplaceOperation("/metadata/dc.description/1", bitstreamDescriptions.get(0)), + new ReplaceOperation("/metadata/dc.description/2", bitstreamDescriptions.get(1)) + ); + String requestBody = getPatchContent(ops); + getClient(token) + .perform(patch("/api/core/bitstreams/" + bitstream.getID()) + .content(requestBody) + .contentType(javax.ws.rs.core.MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", bitstreamDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", bitstreamDescriptions.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", bitstreamDescriptions.get(1), 2) + ) + ) + ); + getClient(token) + .perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", bitstreamDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", bitstreamDescriptions.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", bitstreamDescriptions.get(1), 2) + ) + ) + ); + } + @Test public void testHiddenMetadataForAnonymousUser() throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BundleRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BundleRestRepositoryIT.java index 96385095a200..259580f8c081 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BundleRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BundleRestRepositoryIT.java @@ -37,6 +37,7 @@ import org.dspace.app.rest.model.MetadataValueRest; import org.dspace.app.rest.model.patch.MoveOperation; import org.dspace.app.rest.model.patch.Operation; +import 
org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.ResourcePolicyService; @@ -51,6 +52,8 @@ import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.service.BundleService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; @@ -68,6 +71,9 @@ public class BundleRestRepositoryIT extends AbstractControllerIntegrationTest { @Autowired ItemService itemService; + @Autowired + BundleService bundleService; + private Collection collection; private Item item; private Bundle bundle1; @@ -515,6 +521,77 @@ public void patchMoveBitstreams() throws Exception { ))); } + @Test + public void patchReplaceMultipleDescriptionBundle() throws Exception { + context.turnOffAuthorisationSystem(); + + List bundleDescriptions = List.of( + "FIRST", + "SECOND", + "THIRD" + ); + + bundle1 = BundleBuilder.createBundle(context, item) + .withName("testname") + .build(); + + this.bundleService + .addMetadata( + context, bundle1, + MetadataSchemaEnum.DC.getName(), "description", null, + Item.ANY, bundleDescriptions + ); + + context.restoreAuthSystemState(); + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(get("/api/core/bundles/" + bundle1.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", bundleDescriptions.get(0), 0), + MetadataMatcher.matchMetadata("dc.description", bundleDescriptions.get(1), 1), + MetadataMatcher.matchMetadata("dc.description", bundleDescriptions.get(2), 2) + ) + ) + ); + + List ops = List.of( + new ReplaceOperation("/metadata/dc.description/0", bundleDescriptions.get(2)), + new ReplaceOperation("/metadata/dc.description/1", bundleDescriptions.get(0)), + new ReplaceOperation("/metadata/dc.description/2", bundleDescriptions.get(1)) + ); + String requestBody = getPatchContent(ops); + getClient(token) + .perform(patch("/api/core/bundles/" + bundle1.getID()) + .content(requestBody) + .contentType(javax.ws.rs.core.MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", bundleDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", bundleDescriptions.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", bundleDescriptions.get(1), 2) + ) + ) + ); + getClient(token) + .perform(get("/api/core/bundles/" + bundle1.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", bundleDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", bundleDescriptions.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", bundleDescriptions.get(1), 2) + ) + ) + ); + } + @Test public void deleteBundle() throws Exception { context.turnOffAuthorisationSystem(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java index ab37fac10654..ee522db170c7 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java @@ -69,6 +69,7 @@ import org.dspace.content.Community; import org.dspace.content.EntityType; import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.service.CollectionService; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; @@ -499,13 +500,13 @@ public void findOneCollectionGrantAccessAdminsTest() throws Exception { getClient(tokenParentAdmin).perform(get("/api/core/collections/" + col1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", - Matchers.is((CollectionMatcher.matchCollection(col1))))); + Matchers.is(CollectionMatcher.matchCollection(col1)))); String tokenCol1Admin = getAuthToken(col1Admin.getEmail(), "qwerty02"); getClient(tokenCol1Admin).perform(get("/api/core/collections/" + col1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", - Matchers.is((CollectionMatcher.matchCollection(col1))))); + Matchers.is(CollectionMatcher.matchCollection(col1)))); String tokenCol2Admin = getAuthToken(col2Admin.getEmail(), "qwerty03"); getClient(tokenCol2Admin).perform(get("/api/core/collections/" + col1.getID())) @@ -1206,7 +1207,7 @@ public void createTest() throws Exception { ) ))) .andDo(result -> idRef - .set(UUID.fromString(read(result.getResponse().getContentAsString(), "$.id"))));; + .set(UUID.fromString(read(result.getResponse().getContentAsString(), "$.id")))); getClient(authToken).perform(post("/api/core/collections") @@ -3101,6 +3102,81 @@ public void testAdminAuthorizedSearchUnauthenticated() throws Exception { .andExpect(status().isUnauthorized()); } + @Test + public void patchReplaceMultipleDescriptionCollection() throws Exception { + context.turnOffAuthorisationSystem(); + + List collectionDescriptions = List.of( + "FIRST", + "SECOND", + "THIRD" + ); + + parentCommunity = + CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col = + CollectionBuilder.createCollection(context, parentCommunity) + .withName("MyTest") + .build(); + + this.collectionService + .addMetadata( + context, col, MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY, collectionDescriptions + ); + + context.restoreAuthSystemState(); + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(get("/api/core/collections/" + col.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", collectionDescriptions.get(0), 0), + MetadataMatcher.matchMetadata("dc.description", collectionDescriptions.get(1), 1), + MetadataMatcher.matchMetadata("dc.description", collectionDescriptions.get(2), 2) + ) + ) + ); + + List ops = List.of( + new ReplaceOperation("/metadata/dc.description/0", collectionDescriptions.get(2)), + new ReplaceOperation("/metadata/dc.description/1", collectionDescriptions.get(0)), + new ReplaceOperation("/metadata/dc.description/2", collectionDescriptions.get(1)) + ); + String requestBody = getPatchContent(ops); + getClient(token) + .perform(patch("/api/core/collections/" + col.getID()) + .content(requestBody) + .contentType(javax.ws.rs.core.MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", collectionDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", collectionDescriptions.get(0), 1), + 
MetadataMatcher.matchMetadata("dc.description", collectionDescriptions.get(1), 2) + ) + ) + ); + getClient(token) + .perform(get("/api/core/collections/" + col.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", collectionDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", collectionDescriptions.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", collectionDescriptions.get(1), 2) + ) + ) + ); + } + @Test public void patchMetadataCheckReindexingTest() throws Exception { context.turnOffAuthorisationSystem(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CommunityRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CommunityRestRepositoryIT.java index e084aa174643..30614e6125f2 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CommunityRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CommunityRestRepositoryIT.java @@ -20,6 +20,7 @@ import static org.springframework.http.MediaType.parseMediaType; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; @@ -44,6 +45,8 @@ import org.dspace.app.rest.model.GroupRest; import org.dspace.app.rest.model.MetadataRest; import org.dspace.app.rest.model.MetadataValueRest; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; @@ -56,6 +59,8 @@ import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Collection; import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.service.CommunityService; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; @@ -1935,6 +1940,78 @@ public void patchCommunityMetadataUnauthorized() throws Exception { runPatchMetadataTests(eperson, 403); } + @Test + public void patchReplaceMultipleDescriptionCommunity() throws Exception { + context.turnOffAuthorisationSystem(); + + List communityDescriptions = List.of( + "FIRST", + "SECOND", + "THIRD" + ); + + parentCommunity = + CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + this.communityService + .addMetadata( + context, parentCommunity, + MetadataSchemaEnum.DC.getName(), "description", null, + Item.ANY, communityDescriptions + ); + + context.restoreAuthSystemState(); + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(get("/api/core/communities/" + parentCommunity.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", communityDescriptions.get(0), 0), + MetadataMatcher.matchMetadata("dc.description", communityDescriptions.get(1), 1), + MetadataMatcher.matchMetadata("dc.description", 
communityDescriptions.get(2), 2) + ) + ) + ); + + List ops = List.of( + new ReplaceOperation("/metadata/dc.description/0", communityDescriptions.get(2)), + new ReplaceOperation("/metadata/dc.description/1", communityDescriptions.get(0)), + new ReplaceOperation("/metadata/dc.description/2", communityDescriptions.get(1)) + ); + String requestBody = getPatchContent(ops); + getClient(token) + .perform(patch("/api/core/communities/" + parentCommunity.getID()) + .content(requestBody) + .contentType(javax.ws.rs.core.MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", communityDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", communityDescriptions.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", communityDescriptions.get(1), 2) + ) + ) + ); + getClient(token) + .perform(get("/api/core/communities/" + parentCommunity.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", communityDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", communityDescriptions.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", communityDescriptions.get(1), 2) + ) + ) + ); + } + private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception { context.turnOffAuthorisationSystem(); parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").build(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java index 8c7f80e5fb59..1f09779ab0e9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java @@ -72,6 +72,7 @@ import org.dspace.builder.WorkflowItemBuilder; import org.dspace.content.Collection; import org.dspace.content.Community; +import org.dspace.content.Item; import org.dspace.core.I18nUtil; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; @@ -155,7 +156,7 @@ public void createTest() throws Exception { .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds())) .andDo(result -> idRefNoEmbeds - .set(UUID.fromString(read(result.getResponse().getContentAsString(), "$.id"))));; + .set(UUID.fromString(read(result.getResponse().getContentAsString(), "$.id")))); } finally { EPersonBuilder.deleteEPerson(idRef.get()); @@ -1217,7 +1218,7 @@ public void patchCanLoginMissingValue() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - .andExpect(jsonPath("$.canLogIn", Matchers.is(true)));; + .andExpect(jsonPath("$.canLogIn", Matchers.is(true))); List ops2 = new ArrayList(); @@ -1295,7 +1296,7 @@ public void patchRequireCertificateMissingValue() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - .andExpect(jsonPath("$.requireCertificate", Matchers.is(true)));; + .andExpect(jsonPath("$.requireCertificate", Matchers.is(true))); List ops2 = new ArrayList(); ReplaceOperation replaceOperation2 = new ReplaceOperation("/certificate",null); @@ -1858,6 +1859,78 @@ public void patchMetadataByAdmin() throws Exception { matchMetadata("eperson.firstname", newName))))); } 
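The integration tests added in this patch (including the EPerson test that follows) send JSON Patch bodies containing several replace operations that address repeated metadata values by index, for example /metadata/eperson.firstname/0. Because a replace never shifts positions, applying the operations one after another produces exactly the order the tests assert. A small, JDK-only sketch of that index-based replace semantics (the DSpace Operation classes and REST layer are deliberately left out):

    import java.util.ArrayList;
    import java.util.List;

    public class ReplaceByIndexSketch {
        // Apply one JSON-Patch-style "replace" at a numeric index; replace never shifts positions.
        static void replace(List<String> values, int index, String newValue) {
            values.set(index, newValue);
        }

        public static void main(String[] args) {
            // Mirrors the repeated eperson.firstname values set up in the test that follows.
            List<String> firstnames = new ArrayList<>(List.of("First", "Second", "Third"));

            // The three ReplaceOperations from the test, applied in order,
            // each one against the list as left by the previous operation.
            replace(firstnames, 0, "Third");
            replace(firstnames, 1, "Second");
            replace(firstnames, 2, "First");

            // Order asserted by the test: Third, Second, First
            System.out.println(firstnames);
        }
    }
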
+ @Test + public void patchMultipleReplaceMetadataByAdmin() throws Exception { + + context.turnOffAuthorisationSystem(); + + String first = "First"; + String second = "Second"; + String third = "Third"; + + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withEmail("Johndoe@example.com") + .build(); + + this.ePersonService + .addMetadata(context, ePerson, "eperson", "firstname", null, Item.ANY, List.of(first, second, third)); + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // The replacement of the eperson.firstname value is persisted + getClient(token).perform(get("/api/eperson/epersons/" + ePerson.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("eperson.firstname", first, 0), + MetadataMatcher.matchMetadata("eperson.firstname", second, 1), + MetadataMatcher.matchMetadata("eperson.firstname", third, 2) + ) + ) + ); + + List ops = new ArrayList(); + + ReplaceOperation replaceFirst = new ReplaceOperation("/metadata/eperson.firstname/0", third); + ReplaceOperation replaceSecond = new ReplaceOperation("/metadata/eperson.firstname/1", second); + ReplaceOperation replaceThird = new ReplaceOperation("/metadata/eperson.firstname/2", first); + + ops.add(replaceFirst); + ops.add(replaceSecond); + ops.add(replaceThird); + + String patchBody = getPatchContent(ops); + + getClient(token).perform(patch("/api/eperson/epersons/" + ePerson.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("eperson.firstname", third, 0), + MetadataMatcher.matchMetadata("eperson.firstname", second, 1), + MetadataMatcher.matchMetadata("eperson.firstname", first, 2) + ) + ) + ); + + getClient(token).perform(get("/api/eperson/epersons/" + ePerson.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("eperson.firstname", third, 0), + MetadataMatcher.matchMetadata("eperson.firstname", second, 1), + MetadataMatcher.matchMetadata("eperson.firstname", first, 2) + ) + ) + ); + } + @Test public void patchOwnMetadataByNonAdminUser() throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java index 7121e11953a8..fda8b15effa3 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java @@ -40,6 +40,7 @@ import org.dspace.app.rest.matcher.EPersonMatcher; import org.dspace.app.rest.matcher.GroupMatcher; import org.dspace.app.rest.matcher.HalMatcher; +import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.model.GroupRest; import org.dspace.app.rest.model.MetadataRest; import org.dspace.app.rest.model.MetadataValueRest; @@ -56,6 +57,8 @@ import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Collection; import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; @@ -558,6 +561,68 @@ public void patchGroupName() throws Exception { )); } + @Test + public void 
patchReplaceMultipleDescriptionGroupName() throws Exception { + context.turnOffAuthorisationSystem(); + List groupDescription = List.of( + "FIRST", + "SECOND", + "THIRD" + ); + + Group group = + GroupBuilder.createGroup(context) + .build(); + GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + groupService + .addMetadata( + context, group, MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY, groupDescription + ); + context.restoreAuthSystemState(); + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(get("/api/eperson/groups/" + group.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", groupDescription.get(0), 0), + MetadataMatcher.matchMetadata("dc.description", groupDescription.get(1), 1), + MetadataMatcher.matchMetadata("dc.description", groupDescription.get(2), 2) + ) + ) + ); + + List ops = List.of( + new ReplaceOperation("/metadata/dc.description/0", groupDescription.get(2)), + new ReplaceOperation("/metadata/dc.description/1", groupDescription.get(0)), + new ReplaceOperation("/metadata/dc.description/2", groupDescription.get(1)) + ); + String requestBody = getPatchContent(ops); + + getClient(token) + .perform( + patch("/api/eperson/groups/" + group.getID()) + .content(requestBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON) + ) + .andExpect(status().isOk()); + + getClient(token) + .perform(get("/api/eperson/groups/" + group.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", groupDescription.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", groupDescription.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", groupDescription.get(1), 2) + ) + ) + ); + } + @Test public void patchGroupWithParentUnprocessable() throws Exception { context.turnOffAuthorisationSystem(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SiteRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SiteRestRepositoryIT.java index 092ea32b3f2a..26b01071d179 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SiteRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SiteRestRepositoryIT.java @@ -9,22 +9,34 @@ import static org.hamcrest.Matchers.is; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import java.util.List; import java.util.UUID; +import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.matcher.SiteMatcher; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; import org.dspace.builder.SiteBuilder; +import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.Site; +import org.dspace.content.service.SiteService; import org.dspace.eperson.EPerson; import org.hamcrest.Matchers; import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; public class 
SiteRestRepositoryIT extends AbstractControllerIntegrationTest { + @Autowired + private SiteService siteService; + @Test public void findAll() throws Exception { @@ -77,6 +89,75 @@ public void patchSiteMetadataUnauthorized() throws Exception { runPatchMetadataTests(eperson, 403); } + @Test + public void patchReplaceMultipleDescriptionSite() throws Exception { + context.turnOffAuthorisationSystem(); + + List siteDescriptions = List.of( + "FIRST", + "SECOND", + "THIRD" + ); + + Site site = SiteBuilder.createSite(context).build(); + + this.siteService + .addMetadata( + context, site, + MetadataSchemaEnum.DC.getName(), "description", null, + Item.ANY, siteDescriptions + ); + + context.restoreAuthSystemState(); + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(get("/api/core/sites/" + site.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", siteDescriptions.get(0), 0), + MetadataMatcher.matchMetadata("dc.description", siteDescriptions.get(1), 1), + MetadataMatcher.matchMetadata("dc.description", siteDescriptions.get(2), 2) + ) + ) + ); + + List ops = List.of( + new ReplaceOperation("/metadata/dc.description/0", siteDescriptions.get(2)), + new ReplaceOperation("/metadata/dc.description/1", siteDescriptions.get(0)), + new ReplaceOperation("/metadata/dc.description/2", siteDescriptions.get(1)) + ); + String requestBody = getPatchContent(ops); + getClient(token) + .perform(patch("/api/core/sites/" + site.getID()) + .content(requestBody) + .contentType(javax.ws.rs.core.MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", siteDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", siteDescriptions.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", siteDescriptions.get(1), 2) + ) + ) + ); + getClient(token) + .perform(get("/api/core/sites/" + site.getID())) + .andExpect(status().isOk()) + .andExpect( + jsonPath("$.metadata", + Matchers.allOf( + MetadataMatcher.matchMetadata("dc.description", siteDescriptions.get(2), 0), + MetadataMatcher.matchMetadata("dc.description", siteDescriptions.get(0), 1), + MetadataMatcher.matchMetadata("dc.description", siteDescriptions.get(1), 2) + ) + ) + ); + } + private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception { context.turnOffAuthorisationSystem(); Site site = SiteBuilder.createSite(context).build(); From 0133be8a49316dae40b44e6d8b91b4836323b550 Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Tue, 28 Mar 2023 11:16:18 +0200 Subject: [PATCH 079/686] 99466: Fixed ArXiv date not being formatted as a date but just being saved as a plain string --- dspace/config/spring/api/arxiv-integration.xml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace/config/spring/api/arxiv-integration.xml b/dspace/config/spring/api/arxiv-integration.xml index e963e73a2055..59594b08fa5e 100644 --- a/dspace/config/spring/api/arxiv-integration.xml +++ b/dspace/config/spring/api/arxiv-integration.xml @@ -56,10 +56,12 @@ - + + + From c1731f11ee1d4e3259d2c7f9a81c7d38b441dd2e Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 28 Mar 2023 11:44:12 -0500 Subject: [PATCH 080/686] Fix date tests to use our DateMatcher --- .../rest/SystemWideAlertRestRepositoryIT.java | 63 +++++++++---------- 1 file changed, 28 insertions(+), 35 deletions(-) diff 
--git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SystemWideAlertRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SystemWideAlertRestRepositoryIT.java index beb979dfe68f..522c47670429 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SystemWideAlertRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SystemWideAlertRestRepositoryIT.java @@ -9,22 +9,22 @@ import static com.jayway.jsonpath.JsonPath.read; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.dspace.matcher.DateMatcher.dateMatcher; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.startsWith; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; -import java.text.DateFormat; -import java.text.SimpleDateFormat; +import java.util.Calendar; import java.util.Date; import java.util.concurrent.atomic.AtomicReference; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.time.DateUtils; import org.dspace.alerts.AllowSessionsEnum; import org.dspace.alerts.SystemWideAlert; import org.dspace.app.rest.model.SystemWideAlertRest; @@ -42,11 +42,11 @@ public void findAllTest() throws Exception { // Create two alert entries in the db to fully test the findAll method // Note: It is not possible to create two alerts through the REST API context.turnOffAuthorisationSystem(); - Date countdownDate = new Date(); + Date dateToNearestSecond = DateUtils.round(new Date(), Calendar.SECOND); SystemWideAlert systemWideAlert1 = SystemWideAlertBuilder.createSystemWideAlert(context, "Test alert 1") .withAllowSessions( AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY) - .withCountdownDate(countdownDate) + .withCountdownDate(dateToNearestSecond) .isActive(true) .build(); @@ -58,8 +58,6 @@ public void findAllTest() throws Exception { .build(); context.restoreAuthSystemState(); - DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); - String authToken = getAuthToken(admin.getEmail(), password); getClient(authToken).perform(get("/api/system/systemwidealerts/")) @@ -69,8 +67,7 @@ public void findAllTest() throws Exception { hasJsonPath("$.alertId", is(systemWideAlert1.getID())), hasJsonPath("$.message", is(systemWideAlert1.getMessage())), hasJsonPath("$.allowSessions", is(systemWideAlert1.getAllowSessions().getValue())), - hasJsonPath("$.countdownTo", - startsWith(sdf.format(systemWideAlert1.getCountdownTo()))), + hasJsonPath("$.countdownTo", dateMatcher(dateToNearestSecond)), hasJsonPath("$.active", is(systemWideAlert1.isActive())) ), allOf( @@ -141,11 +138,11 @@ public void findOneTest() throws Exception { // Create two alert entries in the db to fully test the findOne method // Note: It is not possible to create two alerts through the REST API context.turnOffAuthorisationSystem(); - Date countdownDate = new Date(); + Date dateToNearestSecond = DateUtils.round(new Date(), Calendar.SECOND); SystemWideAlert systemWideAlert1 = SystemWideAlertBuilder.createSystemWideAlert(context, "Test alert 1") 
.withAllowSessions( AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY) - .withCountdownDate(countdownDate) + .withCountdownDate(dateToNearestSecond) .isActive(true) .build(); SystemWideAlert systemWideAlert2 = SystemWideAlertBuilder.createSystemWideAlert(context, "Test alert 2") @@ -156,7 +153,6 @@ public void findOneTest() throws Exception { .build(); context.restoreAuthSystemState(); - DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); String authToken = getAuthToken(admin.getEmail(), password); @@ -170,7 +166,7 @@ public void findOneTest() throws Exception { hasJsonPath("$.allowSessions", is(systemWideAlert1.getAllowSessions().getValue())), hasJsonPath("$.countdownTo", - startsWith(sdf.format(systemWideAlert1.getCountdownTo()))), + dateMatcher(dateToNearestSecond)), hasJsonPath("$.active", is(systemWideAlert1.isActive())) ) )); @@ -183,11 +179,11 @@ public void findOneUnauthorizedTest() throws Exception { // Create two alert entries in the db to fully test the findOne method // Note: It is not possible to create two alerts through the REST API context.turnOffAuthorisationSystem(); - Date countdownDate = new Date(); + Date dateToNearestSecond = DateUtils.round(new Date(), Calendar.SECOND); SystemWideAlert systemWideAlert1 = SystemWideAlertBuilder.createSystemWideAlert(context, "Test alert 1") .withAllowSessions( AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY) - .withCountdownDate(countdownDate) + .withCountdownDate(dateToNearestSecond) .isActive(true) .build(); @@ -199,8 +195,6 @@ public void findOneUnauthorizedTest() throws Exception { .build(); context.restoreAuthSystemState(); - DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); - // When the alert is active and the user is not an admin, the user will be able to see the alert getClient().perform(get("/api/system/systemwidealerts/" + systemWideAlert1.getID())) .andExpect(status().isOk()) @@ -211,7 +205,7 @@ public void findOneUnauthorizedTest() throws Exception { hasJsonPath("$.allowSessions", is(systemWideAlert1.getAllowSessions().getValue())), hasJsonPath("$.countdownTo", - startsWith(sdf.format(systemWideAlert1.getCountdownTo()))), + dateMatcher(dateToNearestSecond)), hasJsonPath("$.active", is(systemWideAlert1.isActive())) ) )); @@ -228,11 +222,11 @@ public void findOneForbiddenTest() throws Exception { // Create two alert entries in the db to fully test the findOne method // Note: It is not possible to create two alerts through the REST API context.turnOffAuthorisationSystem(); - Date countdownDate = new Date(); + Date dateToNearestSecond = DateUtils.round(new Date(), Calendar.SECOND); SystemWideAlert systemWideAlert1 = SystemWideAlertBuilder.createSystemWideAlert(context, "Test alert 1") .withAllowSessions( AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY) - .withCountdownDate(countdownDate) + .withCountdownDate(dateToNearestSecond) .isActive(true) .build(); @@ -244,7 +238,6 @@ public void findOneForbiddenTest() throws Exception { .build(); context.restoreAuthSystemState(); - DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); String authToken = getAuthToken(eperson.getEmail(), password); @@ -257,7 +250,7 @@ public void findOneForbiddenTest() throws Exception { hasJsonPath("$.allowSessions", is(systemWideAlert1.getAllowSessions().getValue())), hasJsonPath("$.countdownTo", - startsWith(sdf.format(systemWideAlert1.getCountdownTo()))), + dateMatcher(dateToNearestSecond)), hasJsonPath("$.active", is(systemWideAlert1.isActive())) ) )); @@ -274,11 +267,11 @@ public void findAllActiveTest() throws Exception 
{ // Create three alert entries in the db to fully test the findActive search method // Note: It is not possible to create two alerts through the REST API context.turnOffAuthorisationSystem(); - Date countdownDate = new Date(); + Date dateToNearestSecond = DateUtils.round(new Date(), Calendar.SECOND); SystemWideAlert systemWideAlert1 = SystemWideAlertBuilder.createSystemWideAlert(context, "Test alert 1") .withAllowSessions( AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY) - .withCountdownDate(countdownDate) + .withCountdownDate(dateToNearestSecond) .isActive(true) .build(); @@ -297,8 +290,6 @@ public void findAllActiveTest() throws Exception { .build(); context.restoreAuthSystemState(); - DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); - getClient().perform(get("/api/system/systemwidealerts/search/active")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.systemwidealerts", containsInAnyOrder( @@ -307,7 +298,7 @@ public void findAllActiveTest() throws Exception { hasJsonPath("$.message", is(systemWideAlert1.getMessage())), hasJsonPath("$.allowSessions", is(systemWideAlert1.getAllowSessions().getValue())), hasJsonPath("$.countdownTo", - startsWith(sdf.format(systemWideAlert1.getCountdownTo()))), + dateMatcher(dateToNearestSecond)), hasJsonPath("$.active", is(systemWideAlert1.isActive())) ), allOf( @@ -323,9 +314,11 @@ public void findAllActiveTest() throws Exception { @Test public void createTest() throws Exception { + Date dateToNearestSecond = DateUtils.round(new Date(), Calendar.SECOND); + SystemWideAlertRest systemWideAlertRest = new SystemWideAlertRest(); systemWideAlertRest.setMessage("Alert test message"); - systemWideAlertRest.setCountdownTo(new Date()); + systemWideAlertRest.setCountdownTo(dateToNearestSecond); systemWideAlertRest.setAllowSessions(AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY.getValue()); systemWideAlertRest.setActive(true); @@ -336,7 +329,6 @@ public void createTest() throws Exception { AtomicReference idRef = new AtomicReference<>(); - DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); getClient(authToken).perform(post("/api/system/systemwidealerts/") .content(mapper.writeValueAsBytes(systemWideAlertRest)) .contentType(contentType)) @@ -348,7 +340,7 @@ public void createTest() throws Exception { hasJsonPath("$.allowSessions", is(systemWideAlertRest.getAllowSessions())), hasJsonPath("$.countdownTo", - startsWith(sdf.format(systemWideAlertRest.getCountdownTo()))), + dateMatcher(dateToNearestSecond)), hasJsonPath("$.active", is(systemWideAlertRest.isActive())) ) )) @@ -363,7 +355,7 @@ public void createTest() throws Exception { hasJsonPath("$.message", is(systemWideAlertRest.getMessage())), hasJsonPath("$.allowSessions", is(systemWideAlertRest.getAllowSessions())), hasJsonPath("$.countdownTo", - startsWith(sdf.format(systemWideAlertRest.getCountdownTo()))), + dateMatcher(dateToNearestSecond)), hasJsonPath("$.active", is(systemWideAlertRest.isActive())) ) )); @@ -450,10 +442,12 @@ public void putTest() throws Exception { .build(); context.restoreAuthSystemState(); + Date dateToNearestSecond = DateUtils.round(new Date(), Calendar.SECOND); + SystemWideAlertRest systemWideAlertRest = new SystemWideAlertRest(); systemWideAlertRest.setAlertId(systemWideAlert.getID()); systemWideAlertRest.setMessage("Updated alert test message"); - systemWideAlertRest.setCountdownTo(new Date()); + systemWideAlertRest.setCountdownTo(dateToNearestSecond); systemWideAlertRest.setAllowSessions(AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY.getValue()); 
systemWideAlertRest.setActive(true); @@ -462,7 +456,6 @@ public void putTest() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); - DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); getClient(authToken).perform(put("/api/system/systemwidealerts/" + systemWideAlert.getID()) .content(mapper.writeValueAsBytes(systemWideAlertRest)) .contentType(contentType)) @@ -474,7 +467,7 @@ public void putTest() throws Exception { hasJsonPath("$.allowSessions", is(systemWideAlertRest.getAllowSessions())), hasJsonPath("$.countdownTo", - startsWith(sdf.format(systemWideAlertRest.getCountdownTo()))), + dateMatcher(dateToNearestSecond)), hasJsonPath("$.active", is(systemWideAlertRest.isActive())) ) )); @@ -487,7 +480,7 @@ public void putTest() throws Exception { hasJsonPath("$.message", is(systemWideAlertRest.getMessage())), hasJsonPath("$.allowSessions", is(systemWideAlertRest.getAllowSessions())), hasJsonPath("$.countdownTo", - startsWith(sdf.format(systemWideAlertRest.getCountdownTo()))), + dateMatcher(dateToNearestSecond)), hasJsonPath("$.active", is(systemWideAlertRest.isActive())) ) )); From db3ebe84bde52c16ec53dc7af3cfd0a97981c727 Mon Sep 17 00:00:00 2001 From: "max.nuding" Date: Wed, 29 Mar 2023 15:12:07 +0200 Subject: [PATCH 081/686] Change description column in orcid tables to TEXT type --- ...cid_queue_and_history_descriptions_to_text_type.sql | 10 ++++++++++ ...cid_queue_and_history_descriptions_to_text_type.sql | 10 ++++++++++ ...cid_queue_and_history_descriptions_to_text_type.sql | 10 ++++++++++ 3 files changed, 30 insertions(+) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql new file mode 100644 index 000000000000..7641eb9fc2c0 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE orcid_history ALTER COLUMN description SET DATA TYPE CLOB; +ALTER TABLE orcid_queue ALTER COLUMN description SET DATA TYPE CLOB; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql new file mode 100644 index 000000000000..509e0a286935 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this 
file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE orcid_history MODIFY (description CLOB); +ALTER TABLE orcid_queue MODIFY (description CLOB); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql new file mode 100644 index 000000000000..ae0e414e4440 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE orcid_history ALTER COLUMN description TYPE TEXT; +ALTER TABLE orcid_queue ALTER COLUMN description TYPE TEXT; From aa45a407ba3510604641a7a7550b2728a2217766 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 29 Mar 2023 17:30:59 +0100 Subject: [PATCH 082/686] Fix for #8732 and #8750 --- .../VersionedDOIIdentifierProvider.java | 18 +++++++++--------- .../repository/IdentifierRestRepository.java | 3 ++- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index e7c786d5f8ce..a246906b545b 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -66,7 +66,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) try { history = versionHistoryService.findByItem(context, item); } catch (SQLException ex) { - throw new RuntimeException("A problem occured while accessing the database.", ex); + throw new RuntimeException("A problem occurred while accessing the database.", ex); } String doi = null; @@ -76,7 +76,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) return doi; } } catch (SQLException ex) { - log.error("Error while attemping to retrieve information about a DOI for " + log.error("Error while attempting to retrieve information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso.getID() + ".", ex); throw new RuntimeException("Error while attempting to retrieve " @@ -126,7 +126,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) "A problem with the database connection occurd while processing DOI " + versionedDOI + ".", ex); throw new RuntimeException("A problem with the database connection occured.", ex); } - return versionedDOI; + return DOI.SCHEME + versionedDOI; } } @@ -134,7 +134,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) if (history != null) { // versioning is currently supported for items only // if we have a history, we have a item - doi = makeIdentifierBasedOnHistory(context, dso, history); + doi = makeIdentifierBasedOnHistory(context, dso, history, filter); } else { doi = loadOrCreateDOI(context, dso, null, filter).getDoi(); } @@ -145,7 +145,7 @@ public String 
mint(Context context, DSpaceObject dso, Filter filter) log.error("AuthorizationException while creating a new DOI: ", ex); throw new IdentifierException(ex); } - return doi; + return DOI.SCHEME + doi; } @Override @@ -162,7 +162,7 @@ public void register(Context context, DSpaceObject dso, String identifier, Filte Item item = (Item) dso; if (StringUtils.isEmpty(identifier)) { - identifier = mint(context, dso); + identifier = mint(context, dso, filter); } String doiIdentifier = doiService.formatIdentifier(identifier); @@ -170,10 +170,10 @@ public void register(Context context, DSpaceObject dso, String identifier, Filte // search DOI in our db try { - doi = loadOrCreateDOI(context, dso, doiIdentifier); + doi = loadOrCreateDOI(context, dso, doiIdentifier, filter); } catch (SQLException ex) { - log.error("Error in databse connection: " + ex.getMessage(), ex); - throw new RuntimeException("Error in database conncetion.", ex); + log.error("Error in database connection: " + ex.getMessage(), ex); + throw new RuntimeException("Error in database connection.", ex); } if (DELETED.equals(doi.getStatus()) || diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java index 1be569d18e5d..b4a9688942c1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java @@ -234,7 +234,8 @@ private IdentifierRest registerDOI(Context context, Item item) DOIIdentifierProvider doiIdentifierProvider = DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName("org.dspace.identifier.DOIIdentifierProvider", DOIIdentifierProvider.class); if (doiIdentifierProvider != null) { - String doiValue = doiIdentifierProvider.register(context, item, new TrueFilter()); + String doiValue = doiIdentifierProvider.mint(context, item, new TrueFilter()); + doiIdentifierProvider.register(context, item, doiValue, new TrueFilter()); identifierRest.setValue(doiValue); // Get new status DOI doi = doiService.findByDoi(context, doiValue); From 8989d5ad1620cc6e159554287e7c4cae3bcf20ca Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 29 Mar 2023 20:44:34 +0100 Subject: [PATCH 083/686] Update VersionedDOIIdentifierProvider.java getBareDOI method already adds doi: SCHEME --- .../org/dspace/identifier/VersionedDOIIdentifierProvider.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index a246906b545b..7151f6a492f8 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -126,7 +126,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) "A problem with the database connection occurd while processing DOI " + versionedDOI + ".", ex); throw new RuntimeException("A problem with the database connection occured.", ex); } - return DOI.SCHEME + versionedDOI; + return versionedDOI; } } From ca66fc7309484169b9442716a8887981c870767d Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 29 Mar 2023 21:05:07 +0100 Subject: [PATCH 084/686] Update VersionedDOIIdentifierProvider.java Include check for DOI.SCHEME 
before returning DOI --- .../org/dspace/identifier/VersionedDOIIdentifierProvider.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index 7151f6a492f8..a933e85d30dd 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -145,7 +145,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) log.error("AuthorizationException while creating a new DOI: ", ex); throw new IdentifierException(ex); } - return DOI.SCHEME + doi; + return doi.startsWith(DOI.SCHEME) ? doi : DOI.SCHEME + doi; } @Override From 3738d21e0dcd05a6a484bcbfff97135dfd946315 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Thu, 30 Mar 2023 09:34:25 +0200 Subject: [PATCH 085/686] [DURACOM-128] Unable to delete a profile --- .../src/main/java/org/dspace/identifier/doi/DOIConsumer.java | 1 - 1 file changed, 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java b/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java index 1961ce82744c..33ef058e1696 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java +++ b/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java @@ -141,7 +141,6 @@ public void consume(Context ctx, Event event) throws Exception { + item.getID() + " and DOI " + doi + ".", ex); } } - ctx.commit(); } } From a11ed8a0d3f778f8e937512e726d71e28b577349 Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Thu, 30 Mar 2023 15:38:09 +0200 Subject: [PATCH 086/686] 100553: Sort the queried metadata fields ASC to always display exact matches on top (this can otherwise lead to angular errors) --- .../discovery/indexobject/MetadataFieldIndexFactoryImpl.java | 1 + .../dspace/app/rest/repository/MetadataFieldRestRepository.java | 1 + 2 files changed, 2 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java index 518a8ff14561..bef44326fe75 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java @@ -64,6 +64,7 @@ public SolrInputDocument buildDocument(Context context, IndexableMetadataField i Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); // add read permission on doc for anonymous group doc.addField("read", "g" + anonymousGroup.getID()); + doc.addField(FIELD_NAME_VARIATIONS + "_sort", fieldName); return doc; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index 157a80e264b5..b5d12f1d4547 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -210,6 +210,7 @@ private DiscoverQuery createDiscoverQuery(Context context, String schemaName, St DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.addFilterQueries(filterQueries.toArray(new 
String[filterQueries.size()])); + discoverQuery.setSortField("fieldName_sort", DiscoverQuery.SORT_ORDER.asc); return discoverQuery; } From 0f8b9707f08b9e2bf7b45ad433f94c6e80e3e93f Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Thu, 30 Mar 2023 15:42:18 +0200 Subject: [PATCH 087/686] 100553: Fixed the pagination for core/metadatafield/byFieldName rest endpoint --- .../rest/repository/MetadataFieldRestRepository.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index b5d12f1d4547..65e50005b5b8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -135,13 +135,14 @@ public Page findByFieldName(@Parameter(value = "schema", requ @Parameter(value = "exactName", required = false) String exactName, Pageable pageable) throws SQLException { Context context = obtainContext(); + long totalElements = 0; List matchingMetadataFields = new ArrayList<>(); if (StringUtils.isBlank(exactName)) { // Find matches in Solr Search core DiscoverQuery discoverQuery = - this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query); + this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query, pageable); try { DiscoverResult searchResult = searchService.search(context, null, discoverQuery); for (IndexableObject object : searchResult.getIndexableObjects()) { @@ -149,6 +150,7 @@ public Page findByFieldName(@Parameter(value = "schema", requ matchingMetadataFields.add(((IndexableMetadataField) object).getIndexedObject()); } } + totalElements = searchResult.getTotalSearchResults(); } catch (SearchServiceException e) { log.error("Error while searching with Discovery", e); throw new IllegalArgumentException("Error while searching with Discovery: " + e.getMessage()); @@ -163,10 +165,11 @@ public Page findByFieldName(@Parameter(value = "schema", requ MetadataField exactMatchingMdField = metadataFieldService.findByString(context, exactName, '.'); if (exactMatchingMdField != null) { matchingMetadataFields.add(exactMatchingMdField); + totalElements = 1; } } - return converter.toRestPage(matchingMetadataFields, pageable, utils.obtainProjection()); + return converter.toRestPage(matchingMetadataFields, pageable, totalElements, utils.obtainProjection()); } /** @@ -182,7 +185,7 @@ public Page findByFieldName(@Parameter(value = "schema", requ * @throws SQLException If DB error */ private DiscoverQuery createDiscoverQuery(Context context, String schemaName, String elementName, - String qualifierName, String query) throws SQLException { + String qualifierName, String query, Pageable pageable) throws SQLException { List filterQueries = new ArrayList<>(); if (StringUtils.isNotBlank(query)) { if (query.split("\\.").length > 3) { @@ -211,6 +214,8 @@ private DiscoverQuery createDiscoverQuery(Context context, String schemaName, St DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.addFilterQueries(filterQueries.toArray(new String[filterQueries.size()])); discoverQuery.setSortField("fieldName_sort", DiscoverQuery.SORT_ORDER.asc); + discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); + discoverQuery.setMaxResults(pageable.getPageSize()); return discoverQuery; } From 
b43a92062f44f1a583a3e69f6af1af0fcbb063f6 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Thu, 30 Mar 2023 16:28:31 +0200 Subject: [PATCH 088/686] 100302: Add delays between specific Pubmed calls --- ...PubmedImportMetadataSourceServiceImpl.java | 24 +++++++++++++++++++ .../spring-dspace-addon-import-services.xml | 1 - 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java index f3258c79481f..933d6b1446d8 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java @@ -292,7 +292,14 @@ public Collection call() throws Exception { int countAttempt = 0; while (StringUtils.isBlank(response) && countAttempt <= attempt) { countAttempt++; + + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response)) { @@ -316,7 +323,13 @@ public Collection call() throws Exception { countAttempt = 0; while (StringUtils.isBlank(response2) && countAttempt <= attempt) { countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response2)) { @@ -418,7 +431,13 @@ public Collection call() throws Exception { int countAttempt = 0; while (StringUtils.isBlank(response) && countAttempt <= attempt) { countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response)) { @@ -441,7 +460,12 @@ public Collection call() throws Exception { countAttempt = 0; while (StringUtils.isBlank(response2) && countAttempt <= attempt) { countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response2)) { diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml index b77ddec8fb9a..a75340e91df3 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml @@ -50,7 +50,6 @@ - xml From 96b2437e245d3f3cb9406c82399a64ceefa6adae Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Thu, 30 Mar 2023 17:26:29 +0200 Subject: [PATCH 089/686] 100553: Added backend validation on schema, element and qualifier to check if they contain dots --- .../app/rest/repository/MetadataFieldRestRepository.java | 4 ++++ 
.../app/rest/repository/MetadataSchemaRestRepository.java | 2 ++ 2 files changed, 6 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index 65e50005b5b8..0396a8ad672d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -253,10 +253,14 @@ protected MetadataFieldRest createAndReturn(Context context) if (isBlank(metadataFieldRest.getElement())) { throw new UnprocessableEntityException("metadata element (in request body) cannot be blank"); + } else if (metadataFieldRest.getElement().contains(".")) { + throw new DSpaceBadRequestException("metadata element (in request body) cannot contain dots"); } if (isBlank(metadataFieldRest.getQualifier())) { metadataFieldRest.setQualifier(null); + } else if (metadataFieldRest.getQualifier().contains(".")) { + throw new DSpaceBadRequestException("metadata qualifier (in request body) cannot contain dots"); } // create diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java index 2865a2f1dff2..aeb1d5c616f6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java @@ -96,6 +96,8 @@ protected MetadataSchemaRest createAndReturn(Context context) } if (isBlank(metadataSchemaRest.getNamespace())) { throw new UnprocessableEntityException("metadata schema namespace cannot be blank"); + } else if (metadataSchemaRest.getNamespace().contains(".")) { + throw new UnprocessableEntityException("metadata schema namespace cannot contain dots"); } // create From de46faf23e5208fd7fc0716d371627e6e0b26b0a Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Thu, 30 Mar 2023 17:39:07 +0200 Subject: [PATCH 090/686] 100302: Add debugging logs for pubmed requests --- .../service/LiveImportClientImpl.java | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java index 81a6631127ac..1a8a7a7861ed 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java @@ -60,7 +60,8 @@ public String executeHttpGetRequest(int timeout, String URL, Map headerParams = params.get(HEADER_PARAMETERS); @@ -71,7 +72,9 @@ public String executeHttpGetRequest(int timeout, String URL, Map Date: Fri, 31 Mar 2023 08:32:12 +0200 Subject: [PATCH 091/686] Add CLOB column annotations --- dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java | 2 ++ dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java | 2 ++ 2 files changed, 4 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java index 33edea112e76..a567c6e7a7fe 100644 --- 
a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java @@ -79,6 +79,8 @@ public class OrcidHistory implements ReloadableEntity { /** * A description of the synchronized resource. */ + @Lob + @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") @Column(name = "description") private String description; diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java index 4794e89008c3..9261f14eea24 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java @@ -64,6 +64,8 @@ public class OrcidQueue implements ReloadableEntity { /** * A description of the resource to be synchronized. */ + @Lob + @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") @Column(name = "description") private String description; From d5c001463ab92f2db5393253cfd070bf91ff084a Mon Sep 17 00:00:00 2001 From: "max.nuding" Date: Fri, 31 Mar 2023 08:49:53 +0200 Subject: [PATCH 092/686] Add integration tests for orcid bulk push with long titles --- .../dspace/orcid/script/OrcidBulkPushIT.java | 56 +++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java b/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java index db66f6c7aa33..e6ca2a3d9e7e 100644 --- a/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java @@ -215,6 +215,62 @@ public void testWithManyOrcidQueueRecords() throws Exception { } + @Test + public void testWithVeryLongTitleQueueRecords() throws Exception { + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item firstEntity = createPublication("Publication with a very very very very very very very very very " + + "very very very very very very very very very very very very very very very very very very very very " + + "very very very very very very very very very very very very very very very very very even " + + "extremely long title"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenReturn(createdResponse("12345")); + + when(orcidClientMock.update(any(), eq("0000-1111-2222-3333"), any(), eq("98765"))) + .thenReturn(updatedResponse("98765")); + + when(orcidClientMock.deleteByPutCode( + any(), + eq("0000-1111-2222-3333"), + eq("22222"), + eq("/work")) + ).thenReturn(deletedResponse()); + + createOrcidQueue(context, firstProfileItem, firstEntity); + createOrcidQueue(context, firstProfileItem, "Description", "Publication", "22222"); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + String firstProfileItemId = firstProfileItem.getID().toString(); + + assertThat(handler.getInfoMessages(), hasSize(5)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 2 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + firstProfileItemId, + "History record created with status 201. The operation was completed successfully", + "Deletion of Publication for profile with ID: " + firstProfileItemId + " by put code 22222", + "History record created with status 204. 
The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verify(orcidClientMock).deleteByPutCode( + any(), + eq("0000-1111-2222-3333"), + eq("22222"), + eq("/work")); + + verifyNoMoreInteractions(orcidClientMock); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 201, INSERT)))); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, 204, DELETE)))); + } + @Test public void testWithOneValidationError() throws Exception { From 855ca64c1e0d8c4eb18ce83ce84a5f5b0bf7cc0a Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 16 Mar 2023 10:11:36 +0300 Subject: [PATCH 093/686] dspace: Change description for image/png to "PNG" "image/png" is the mime type, not the short description. --- dspace/config/registries/bitstream-formats.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/config/registries/bitstream-formats.xml b/dspace/config/registries/bitstream-formats.xml index 076959a31905..b85396c46ab4 100644 --- a/dspace/config/registries/bitstream-formats.xml +++ b/dspace/config/registries/bitstream-formats.xml @@ -201,7 +201,7 @@ image/png - image/png + PNG Portable Network Graphics 1 false From 00313ad9250bc1240b421dde6c1d8bcf5d11e3db Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 16 Mar 2023 10:14:05 +0300 Subject: [PATCH 094/686] dspace.cfg: update name of PNG format We should use the short description, not the mime type, as with all other image types. --- dspace/config/dspace.cfg | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 2a35e894593a..5d69d9295bf1 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -492,9 +492,9 @@ filter.org.dspace.app.mediafilter.TikaTextExtractionFilter.inputFormats = OpenDo filter.org.dspace.app.mediafilter.TikaTextExtractionFilter.inputFormats = OpenDocument Text filter.org.dspace.app.mediafilter.TikaTextExtractionFilter.inputFormats = RTF filter.org.dspace.app.mediafilter.TikaTextExtractionFilter.inputFormats = Text -filter.org.dspace.app.mediafilter.JPEGFilter.inputFormats = BMP, GIF, JPEG, image/png -filter.org.dspace.app.mediafilter.BrandedPreviewJPEGFilter.inputFormats = BMP, GIF, JPEG, image/png -filter.org.dspace.app.mediafilter.ImageMagickImageThumbnailFilter.inputFormats = BMP, GIF, image/png, JPG, TIFF, JPEG, JPEG 2000 +filter.org.dspace.app.mediafilter.JPEGFilter.inputFormats = BMP, GIF, JPEG, PNG +filter.org.dspace.app.mediafilter.BrandedPreviewJPEGFilter.inputFormats = BMP, GIF, JPEG, PNG +filter.org.dspace.app.mediafilter.ImageMagickImageThumbnailFilter.inputFormats = BMP, GIF, PNG, JPG, TIFF, JPEG, JPEG 2000 filter.org.dspace.app.mediafilter.ImageMagickPdfThumbnailFilter.inputFormats = Adobe PDF filter.org.dspace.app.mediafilter.PDFBoxThumbnail.inputFormats = Adobe PDF From 3f8970048b993fb60abd96035eee12d3fc411c33 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 16 Mar 2023 10:14:36 +0300 Subject: [PATCH 095/686] dspace: add WebP and AVIF to bitstream format registry WebP and AVIF are modern image formats that have been around for a few years and have become increasingly widely supported. 
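
Once these registry entries are in place, a quick sanity check is to list the registered bitstream formats over the REST API and confirm the new entries appear. A minimal sketch, not part of this patch, assuming a local server at `http://localhost:8080/server`, the standard `/api/core/bitstreamformats` endpoint, and a `shortDescription` field in the JSON response (all deployment-specific assumptions):

```bash
# Hypothetical base URL; adjust to the actual deployment.
BASE="http://localhost:8080/server"

# List registered bitstream formats and check that WebP and AVIF are present.
curl -s "$BASE/api/core/bitstreamformats?size=100" \
  | grep -E '"shortDescription"\s*:\s*"(WebP|AVIF)"'
```
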
--- dspace/config/registries/bitstream-formats.xml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/dspace/config/registries/bitstream-formats.xml b/dspace/config/registries/bitstream-formats.xml index b85396c46ab4..ab9e9687d7d1 100644 --- a/dspace/config/registries/bitstream-formats.xml +++ b/dspace/config/registries/bitstream-formats.xml @@ -800,4 +800,22 @@ mp3 + + image/webp + WebP + WebP is a modern image format that provides superior lossless and lossy compression for images on the web. + 1 + false + webp + + + + image/avif + AVIF + AV1 Image File Format (AVIF) is an open, royalty-free image file format specification for storing images or image sequences compressed with AV1 in the HEIF container format. + 1 + false + avif + + From f384eaaf913d5086c4efd0a19873c4fc2b32bf51 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 29 Mar 2023 14:40:05 +0300 Subject: [PATCH 096/686] dspace-server-webapp: Update IT for bitstream format registry Since we added WebP and AVIF we need to increase the expected numb- er of formats by two. --- .../org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java index d5798ba5a3f0..f566b2fe638c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java @@ -56,7 +56,7 @@ public class BitstreamFormatRestRepositoryIT extends AbstractControllerIntegrati @Autowired private BitstreamFormatConverter bitstreamFormatConverter; - private final int DEFAULT_AMOUNT_FORMATS = 82; + private final int DEFAULT_AMOUNT_FORMATS = 84; @Test public void findAllPaginationTest() throws Exception { From e777142502b8c2d5da59d32a96adf244202994d2 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Fri, 24 Mar 2023 11:55:01 +0300 Subject: [PATCH 097/686] dspace-api: Add SQL migration for PNG Add a Flyway SQL migration to update the description of PNG in the bitstream format registry. 
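
On an existing database this change is only picked up when Flyway migrations run. A minimal sketch of applying and verifying it with the standard DSpace CLI, assuming `[dspace]` stands for the installation directory and that the exact listing format of the migration table depends on the Flyway version bundled with DSpace:

```bash
# Apply any pending Flyway migrations, including the PNG registry update below.
[dspace]/bin/dspace database migrate

# Inspect migration state; the 2023.03.24 PNG entry should be listed as applied.
[dspace]/bin/dspace database info
```
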
--- ..._Update_PNG_in_bitstream_format_registry.sql | 17 +++++++++++++++++ ..._Update_PNG_in_bitstream_format_registry.sql | 17 +++++++++++++++++ ..._Update_PNG_in_bitstream_format_registry.sql | 17 +++++++++++++++++ 3 files changed, 51 insertions(+) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql new file mode 100644 index 000000000000..8aec44a7f6f2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql @@ -0,0 +1,17 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Update short description for PNG mimetype in the bitstream format registry +-- See: https://github.com/DSpace/DSpace/pull/8722 +----------------------------------------------------------------------------------- + +UPDATE bitstreamformatregistry +SET short_description='PNG' +WHERE short_description='image/png' + AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql new file mode 100644 index 000000000000..8aec44a7f6f2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql @@ -0,0 +1,17 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Update short description for PNG mimetype in the bitstream format registry +-- See: https://github.com/DSpace/DSpace/pull/8722 +----------------------------------------------------------------------------------- + +UPDATE bitstreamformatregistry +SET short_description='PNG' +WHERE short_description='image/png' + AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql new file mode 100644 index 000000000000..8aec44a7f6f2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql @@ -0,0 +1,17 
@@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Update short description for PNG mimetype in the bitstream format registry +-- See: https://github.com/DSpace/DSpace/pull/8722 +----------------------------------------------------------------------------------- + +UPDATE bitstreamformatregistry +SET short_description='PNG' +WHERE short_description='image/png' + AND mimetype='image/png'; From a26f6bc80ed040f74d8dd2e0b2ae5dca86b02555 Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Mon, 3 Apr 2023 13:05:50 +0200 Subject: [PATCH 098/686] 100553: Removed possibility to updated schema name, element and qualifier --- .../MetadataFieldRestRepository.java | 20 +++-- .../MetadataSchemaRestRepository.java | 15 ++-- .../rest/MetadataSchemaRestRepositoryIT.java | 30 ++++++- .../rest/MetadatafieldRestRepositoryIT.java | 86 +++++++++++++++---- 4 files changed, 114 insertions(+), 37 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index 0396a8ad672d..c185e8334219 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -313,21 +313,23 @@ protected MetadataFieldRest put(Context context, HttpServletRequest request, Str throw new UnprocessableEntityException("Cannot parse JSON in request body", e); } - if (metadataFieldRest == null || isBlank(metadataFieldRest.getElement())) { - throw new UnprocessableEntityException("metadata element (in request body) cannot be blank"); + MetadataField metadataField = metadataFieldService.find(context, id); + if (metadataField == null) { + throw new ResourceNotFoundException("metadata field with id: " + id + " not found"); } - if (!Objects.equals(id, metadataFieldRest.getId())) { - throw new UnprocessableEntityException("ID in request body doesn't match path ID"); + if (!Objects.equals(metadataFieldRest.getElement(), metadataField.getElement())) { + throw new UnprocessableEntityException("Metadata element cannot be updated."); } - MetadataField metadataField = metadataFieldService.find(context, id); - if (metadataField == null) { - throw new ResourceNotFoundException("metadata field with id: " + id + " not found"); + if (!Objects.equals(metadataFieldRest.getQualifier(), metadataField.getQualifier())) { + throw new UnprocessableEntityException("Metadata qualifier cannot be updated."); + } + + if (!Objects.equals(id, metadataFieldRest.getId())) { + throw new UnprocessableEntityException("ID in request body doesn't match path ID"); } - metadataField.setElement(metadataFieldRest.getElement()); - metadataField.setQualifier(metadataFieldRest.getQualifier()); metadataField.setScopeNote(metadataFieldRest.getScopeNote()); try { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java index aeb1d5c616f6..c45ac9821ff6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java 
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java @@ -147,8 +147,13 @@ protected MetadataSchemaRest put(Context context, HttpServletRequest request, St throw new UnprocessableEntityException("Cannot parse JSON in request body", e); } - if (metadataSchemaRest == null || isBlank(metadataSchemaRest.getPrefix())) { - throw new UnprocessableEntityException("metadata schema name cannot be blank"); + MetadataSchema metadataSchema = metadataSchemaService.find(context, id); + if (metadataSchema == null) { + throw new ResourceNotFoundException("metadata schema with id: " + id + " not found"); + } + + if (!Objects.equals(metadataSchemaRest.getPrefix(), metadataSchema.getName())) { + throw new UnprocessableEntityException("Metadata schema name cannot be updated."); } if (isBlank(metadataSchemaRest.getNamespace())) { throw new UnprocessableEntityException("metadata schema namespace cannot be blank"); @@ -158,12 +163,6 @@ protected MetadataSchemaRest put(Context context, HttpServletRequest request, St throw new UnprocessableEntityException("ID in request doesn't match path ID"); } - MetadataSchema metadataSchema = metadataSchemaService.find(context, id); - if (metadataSchema == null) { - throw new ResourceNotFoundException("metadata schema with id: " + id + " not found"); - } - - metadataSchema.setName(metadataSchemaRest.getPrefix()); metadataSchema.setNamespace(metadataSchemaRest.getNamespace()); try { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java index f1a1a095b16e..02d51fb443af 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java @@ -202,7 +202,7 @@ public void update() throws Exception { MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); metadataSchemaRest.setId(metadataSchema.getID()); - metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setPrefix(TEST_NAME); metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) @@ -214,7 +214,33 @@ public void update() throws Exception { getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", MetadataschemaMatcher - .matchEntry(TEST_NAME_UPDATED, TEST_NAMESPACE_UPDATED))); + .matchEntry(TEST_NAME, TEST_NAMESPACE_UPDATED))); + } + + @Test + public void update_schemaNameShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE) + .build(); + + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); + metadataSchemaRest.setId(metadataSchema.getID()); + metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadataschemas/" + metadataSchema.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", 
MetadataschemaMatcher + .matchEntry(TEST_NAME, TEST_NAMESPACE))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java index 56c8f637f10d..b85dade17043 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java @@ -832,31 +832,81 @@ public void update() throws Exception { context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) - .build(); + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isOk()); + } + + @Test + public void update_elementShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); context.restoreAuthSystemState(); MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); metadataFieldRest.setId(metadataField.getID()); metadataFieldRest.setElement(ELEMENT_UPDATED); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) + )); + } + + @Test + public void update_qualifierShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); metadataFieldRest.setQualifier(QUALIFIER_UPDATED); metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) .perform(put("/api/core/metadatafields/" + metadataField.getID()) - .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) - .contentType(contentType)) - .andExpect(status().isOk()); + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( - metadataSchema.getName(), ELEMENT_UPDATED, QUALIFIER_UPDATED) - )); + .andExpect(status().isOk()) 
+ .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) + )); } @Test - public void update_checkUpdatedInIndex() throws Exception { + public void update_checkNotUpdatedInIndex() throws Exception { context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) @@ -885,27 +935,27 @@ public void update_checkUpdatedInIndex() throws Exception { .perform(put("/api/core/metadatafields/" + metadataField.getID()) .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) .contentType(contentType)) - .andExpect(status().isOk()); + .andExpect(status().isUnprocessableEntity()); - // new metadata field found in index + // new metadata field not found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) .param("element", ELEMENT_UPDATED) .param("qualifier", QUALIFIER_UPDATED)) .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( - MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), - ELEMENT_UPDATED, QUALIFIER_UPDATED)) - )) - .andExpect(jsonPath("$.page.totalElements", is(1))); + .andExpect(jsonPath("$.page.totalElements", is(0))); - // original metadata field not found in index + // original metadata field found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) .param("element", metadataField.getElement()) .param("qualifier", metadataField.getQualifier())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.page.totalElements", is(0))); + .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( + MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), + ELEMENT, QUALIFIER)) + )) + .andExpect(jsonPath("$.page.totalElements", is(1))); } @Test From 0e0962e3bc66b459a2a1b8f85e317815989c439f Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Sun, 19 Mar 2023 23:41:34 +0100 Subject: [PATCH 099/686] Added WebVTT to default supported bitstream formats --- .../dspace/app/rest/BitstreamFormatRestRepositoryIT.java | 2 +- dspace/config/registries/bitstream-formats.xml | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java index f566b2fe638c..fd128269308d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java @@ -56,7 +56,7 @@ public class BitstreamFormatRestRepositoryIT extends AbstractControllerIntegrati @Autowired private BitstreamFormatConverter bitstreamFormatConverter; - private final int DEFAULT_AMOUNT_FORMATS = 84; + private final int DEFAULT_AMOUNT_FORMATS = 85; @Test public void findAllPaginationTest() throws Exception { diff --git a/dspace/config/registries/bitstream-formats.xml b/dspace/config/registries/bitstream-formats.xml index ab9e9687d7d1..3515773fd742 100644 --- a/dspace/config/registries/bitstream-formats.xml +++ b/dspace/config/registries/bitstream-formats.xml @@ -115,6 +115,15 @@ csv + + text/vtt + WebVTT + Web Video Text Tracks Format + 1 + false + vtt + + application/msword Microsoft Word From 44e7bef9caeeebd75f828851e4099bcf0b0b5a12 Mon Sep 17 00:00:00 2001 From: Ma-Tador 
<86478723+Ma-Tador@users.noreply.github.com> Date: Thu, 6 Apr 2023 11:24:30 +0200 Subject: [PATCH 100/686] fix bug: reset password from Admin Panel #8765 --- .../RegistrationRestRepository.java | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java index ac44ccb4c274..2cc14c1ff705 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java @@ -87,17 +87,22 @@ public RegistrationRest findOne(Context context, Integer integer) { public Page findAll(Context context, Pageable pageable) { throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); } - + @Override public RegistrationRest createAndReturn(Context context) { HttpServletRequest request = requestService.getCurrentRequest().getHttpServletRequest(); ObjectMapper mapper = new ObjectMapper(); RegistrationRest registrationRest; - + String accountType = request.getParameter(TYPE_QUERY_PARAM); + if (StringUtils.isBlank(accountType) || + (!accountType.equalsIgnoreCase(TYPE_FORGOT) && !accountType.equalsIgnoreCase(TYPE_REGISTER))) { + throw new IllegalArgumentException(String.format("Needs query param '%s' with value %s or %s indicating " + + "what kind of registration request it is", TYPE_QUERY_PARAM, TYPE_FORGOT, TYPE_REGISTER)); + } String captchaToken = request.getHeader("X-Recaptcha-Token"); boolean verificationEnabled = configurationService.getBooleanProperty("registration.verification.enabled"); - if (verificationEnabled) { + if (verificationEnabled && !accountType.equalsIgnoreCase(TYPE_FORGOT)) { try { captchaService.processResponse(captchaToken, REGISTER_ACTION); } catch (InvalidReCaptchaException e) { @@ -114,12 +119,6 @@ public RegistrationRest createAndReturn(Context context) { if (StringUtils.isBlank(registrationRest.getEmail())) { throw new UnprocessableEntityException("The email cannot be omitted from the Registration endpoint"); } - String accountType = request.getParameter(TYPE_QUERY_PARAM); - if (StringUtils.isBlank(accountType) || - (!accountType.equalsIgnoreCase(TYPE_FORGOT) && !accountType.equalsIgnoreCase(TYPE_REGISTER))) { - throw new IllegalArgumentException(String.format("Needs query param '%s' with value %s or %s indicating " + - "what kind of registration request it is", TYPE_QUERY_PARAM, TYPE_FORGOT, TYPE_REGISTER)); - } EPerson eperson = null; try { eperson = ePersonService.findByEmail(context, registrationRest.getEmail()); @@ -130,14 +129,14 @@ public RegistrationRest createAndReturn(Context context) { try { if (!AuthorizeUtil.authorizeUpdatePassword(context, eperson.getEmail())) { throw new DSpaceBadRequestException("Password cannot be updated for the given EPerson with email: " - + eperson.getEmail()); + + eperson.getEmail()); } accountService.sendForgotPasswordInfo(context, registrationRest.getEmail()); } catch (SQLException | IOException | MessagingException | AuthorizeException e) { log.error("Something went wrong with sending forgot password info email: " - + registrationRest.getEmail(), e); + + registrationRest.getEmail(), e); } - } else if (accountType.equalsIgnoreCase(TYPE_REGISTER)) { + } else if (accountType.equalsIgnoreCase(TYPE_REGISTER)) { try { String email = 
registrationRest.getEmail(); if (!AuthorizeUtil.authorizeNewAccountRegistration(context, request)) { @@ -152,12 +151,13 @@ public RegistrationRest createAndReturn(Context context) { accountService.sendRegistrationInfo(context, email); } catch (SQLException | IOException | MessagingException | AuthorizeException e) { log.error("Something went wrong with sending registration info email: " - + registrationRest.getEmail(), e); + + registrationRest.getEmail(), e); } } return null; } + @Override public Class getDomainClass() { return RegistrationRest.class; From b36e6df9bcd5c6e542aae1e1b2aeaa503cb1ceff Mon Sep 17 00:00:00 2001 From: Mark Cooper Date: Tue, 21 Feb 2023 14:44:36 -0800 Subject: [PATCH 101/686] Build, use, publish dspace/dspace-solr imgs --- .dockerignore | 1 - .github/workflows/docker.yml | 26 ++++++++++++++ docker-compose.yml | 27 +++++++------- dspace/src/main/docker/README.md | 17 +++++++++ dspace/src/main/docker/dspace-solr/Dockerfile | 36 +++++++++++++++++++ 5 files changed, 92 insertions(+), 15 deletions(-) create mode 100644 dspace/src/main/docker/dspace-solr/Dockerfile diff --git a/.dockerignore b/.dockerignore index 0e42960dc9c0..7d3bdc2b4b0d 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,6 +6,5 @@ dspace/modules/*/target/ Dockerfile.* dspace/src/main/docker/dspace-postgres-pgcrypto dspace/src/main/docker/dspace-postgres-pgcrypto-curl -dspace/src/main/docker/solr dspace/src/main/docker/README.md dspace/src/main/docker-compose/ diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 64e12f01aac0..faa50ac8dcc2 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -170,3 +170,29 @@ jobs: # Use tags / labels provided by 'docker/metadata-action' above tags: ${{ steps.meta_build_cli.outputs.tags }} labels: ${{ steps.meta_build_cli.outputs.labels }} + + ########################################### + # Build/Push the 'dspace/dspace-solr' image + ########################################### + # Get Metadata for docker_build_solr step below + - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image + id: meta_build_solr + uses: docker/metadata-action@v4 + with: + images: dspace/dspace-solr + tags: ${{ env.IMAGE_TAGS }} + flavor: ${{ env.TAGS_FLAVOR }} + + - name: Build and push 'dspace-solr' image + id: docker_build_solr + uses: docker/build-push-action@v3 + with: + context: . + file: ./dspace/src/main/docker/dspace-solr/Dockerfile + platforms: ${{ env.PLATFORMS }} + # For pull requests, we run the Docker build (to ensure no PR changes break the build), + # but we ONLY do an image push to DockerHub if it's NOT a PR + push: ${{ github.event_name != 'pull_request' }} + # Use tags / labels provided by 'docker/metadata-action' above + tags: ${{ steps.meta_build_solr.outputs.tags }} + labels: ${{ steps.meta_build_solr.outputs.labels }} diff --git a/docker-compose.yml b/docker-compose.yml index 6008b873ae5f..40b4ce064eab 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -81,8 +81,12 @@ services: # DSpace Solr container dspacesolr: container_name: dspacesolr - # Uses official Solr image at https://hub.docker.com/_/solr/ - image: solr:8.11-slim + image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-dspace-7_x}" + build: + context: . 
+ dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile + args: + SOLR_VERSION: "${SOLR_VER:-8.11}" networks: dspacenet: ports: @@ -92,30 +96,25 @@ services: tty: true working_dir: /var/solr/data volumes: - # Mount our local Solr core configs so that they are available as Solr configsets on container - - ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority - - ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai - - ./dspace/solr/search:/opt/solr/server/solr/configsets/search - - ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics # Keep Solr data directory between reboots - solr_data:/var/solr/data - # Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr + # Initialize all DSpace Solr cores then start Solr: # * First, run precreate-core to create the core (if it doesn't yet exist). If exists already, this is a no-op - # * Second, copy updated configs from mounted configsets to this core. If it already existed, this updates core - # to the latest configs. If it's a newly created core, this is a no-op. + # * Second, copy configsets to this core: + # Updates to Solr configs require the container to be rebuilt/restarted: `docker compose -p d7 up -d --build dspacesolr` entrypoint: - /bin/bash - '-c' - | init-var-solr precreate-core authority /opt/solr/server/solr/configsets/authority - cp -r -u /opt/solr/server/solr/configsets/authority/* authority + cp -r /opt/solr/server/solr/configsets/authority/* authority precreate-core oai /opt/solr/server/solr/configsets/oai - cp -r -u /opt/solr/server/solr/configsets/oai/* oai + cp -r /opt/solr/server/solr/configsets/oai/* oai precreate-core search /opt/solr/server/solr/configsets/search - cp -r -u /opt/solr/server/solr/configsets/search/* search + cp -r /opt/solr/server/solr/configsets/search/* search precreate-core statistics /opt/solr/server/solr/configsets/statistics - cp -r -u /opt/solr/server/solr/configsets/statistics/* statistics + cp -r /opt/solr/server/solr/configsets/statistics/* statistics exec solr -f volumes: assetstore: diff --git a/dspace/src/main/docker/README.md b/dspace/src/main/docker/README.md index 6c9da0190cd2..f2a944f60851 100644 --- a/dspace/src/main/docker/README.md +++ b/dspace/src/main/docker/README.md @@ -130,6 +130,23 @@ docker run -i -t -d -p 80:80 -p 443:443 dspace/dspace-shibboleth This image can also be rebuilt using the `../docker-compose/docker-compose-shibboleth.yml` script. +## dspace/src/main/docker/dspace-solr/Dockerfile + +This Dockerfile builds a Solr image with DSpace Solr configsets included. It +can be pulled / built following the [docker compose resources](../docker-compose/README.md) +documentation. Or, to just build and/or run Solr: + +```bash +docker-compose build dspacesolr +docker-compose -p d7 up -d dspacesolr +``` + +If you're making iterative changes to the DSpace Solr configsets you'll need to rebuild / +restart the `dspacesolr` container for the changes to be deployed. 
From DSpace root: + +```bash +docker-compose -p d7 up --detach --build dspacesolr +``` ## test/ folder diff --git a/dspace/src/main/docker/dspace-solr/Dockerfile b/dspace/src/main/docker/dspace-solr/Dockerfile new file mode 100644 index 000000000000..9fe9adf9440f --- /dev/null +++ b/dspace/src/main/docker/dspace-solr/Dockerfile @@ -0,0 +1,36 @@ +# +# The contents of this file are subject to the license and copyright +# detailed in the LICENSE and NOTICE files at the root of the source +# tree and available online at +# +# http://www.dspace.org/license/ +# + +# To build use root as context for (easier) access to solr cfgs +# docker build --build-arg SOLR_VERSION=8.11 -f ./dspace/src/main/docker/dspace-solr/Dockerfile . +# This will be published as dspace/dspace-solr:$DSPACE_VERSION + +ARG SOLR_VERSION=8.11 + +FROM solr:${SOLR_VERSION}-slim + +ENV AUTHORITY_CONFIGSET_PATH=/opt/solr/server/solr/configsets/authority/conf \ + OAI_CONFIGSET_PATH=/opt/solr/server/solr/configsets/oai/conf \ + SEARCH_CONFIGSET_PATH=/opt/solr/server/solr/configsets/search/conf \ + STATISTICS_CONFIGSET_PATH=/opt/solr/server/solr/configsets/statistics/conf + +USER root + +RUN mkdir -p $AUTHORITY_CONFIGSET_PATH && \ + mkdir -p $OAI_CONFIGSET_PATH && \ + mkdir -p $SEARCH_CONFIGSET_PATH && \ + mkdir -p $STATISTICS_CONFIGSET_PATH + +COPY dspace/solr/authority/conf/* $AUTHORITY_CONFIGSET_PATH/ +COPY dspace/solr/oai/conf/* $OAI_CONFIGSET_PATH/ +COPY dspace/solr/search/conf/* $SEARCH_CONFIGSET_PATH/ +COPY dspace/solr/statistics/conf/* $STATISTICS_CONFIGSET_PATH/ + +RUN chown -R solr:solr /opt/solr/server/solr/configsets + +USER solr From 11bc061ff9068fe64835251962b08f495fed03f2 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Wed, 5 Apr 2023 13:20:24 +0200 Subject: [PATCH 102/686] 97248: Implement feedback --- .../org/dspace/discovery/SearchUtils.java | 22 +- .../discovery/SolrServiceFileInfoPlugin.java | 8 +- .../DiscoveryConfigurationService.java | 75 +- .../InprogressSubmissionIndexFactoryImpl.java | 2 +- .../indexobject/ItemIndexFactoryImpl.java | 2 +- .../repository/DiscoveryRestRepository.java | 10 +- .../config/spring/api/discovery.xml | 3067 ----------------- .../DiscoveryScopeBasedRestControllerIT.java | 22 +- .../app/rest/matcher/FacetEntryMatcher.java | 2 +- 9 files changed, 95 insertions(+), 3115 deletions(-) delete mode 100644 dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 418720be4a48..4a53e344544a 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -62,12 +62,24 @@ public static SearchService getSearchService() { return searchService; } + /** + * Retrieves the Discovery Configuration for a null context, prefix and DSpace object. + * This will result in returning the default configuration + * @return the default configuration + */ public static DiscoveryConfiguration getDiscoveryConfiguration() { return getDiscoveryConfiguration(null, null, null); } - public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context, - DSpaceObject dso) { + /** + * Retrieves the Discovery Configuration with a null prefix for a DSpace object. 
+ * @param context + * the dabase context + * @param dso + * the DSpace object + * @return the Discovery Configuration for the specified DSpace object + */ + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) { return getDiscoveryConfiguration(context, null, dso); } @@ -84,7 +96,7 @@ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context con * the DSpaceObject * @return the discovery configuration for the specified scope */ - public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context, String prefix, + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix, DSpaceObject dso) { if (prefix != null) { return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); @@ -161,12 +173,12 @@ public static List getIgnoredMetadataFields(int type) { * Method that retrieves a list of all the configuration objects from the given item * A configuration object can be returned for each parent community/collection * - * @param item the DSpace item * @param context the database context + * @param item the DSpace item * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. */ - public static List getAllDiscoveryConfigurations(Item item, Context context) + public static List getAllDiscoveryConfigurations(Context context, Item item) throws SQLException { List collections = item.getCollections(); return getAllDiscoveryConfigurations(context, null, collections, item); diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index 6bda2fc52d84..7aece5acf313 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -53,16 +53,20 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So if (bitstreams != null) { for (Bitstream bitstream : bitstreams) { document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); + // Add _keyword and _filter fields which are necessary to support filtering and faceting + // for the file names document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName()); document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName()); String description = bitstream.getDescription(); if ((description != null) && !description.isEmpty()) { document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); + // Add _keyword and _filter fields which are necessary to support filtering and + // faceting for the descriptions document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", - bitstream.getName()); + description); document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", - bitstream.getName()); + description); } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 7d5b435555f9..da23b87a3594 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -17,6 +17,8 @@ import org.apache.commons.lang3.StringUtils; import 
org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.content.Collection; +import org.dspace.content.Community; import org.dspace.content.DSpaceObject; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; @@ -36,10 +38,11 @@ public class DiscoveryConfigurationService { private Map> toIgnoreMetadataFields = new HashMap<>(); /** - * Discovery configurations, cached by DSO UUID. When a DSO doesn't have its own configuration, we take the one of - * the first parent that does. This cache ensures we don't have to go up the hierarchy every time. + * Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its + * own configuration, we take the one of the first parent that does. + * This cache ensures we do not have to go up the hierarchy every time. */ - private final Map uuidMap = new HashMap<>(); + private final Map comColToDiscoveryConfigurationMap = new HashMap<>(); public Map getMap() { return map; @@ -57,15 +60,26 @@ public void setToIgnoreMetadataFields(Map> toIgnoreMetadat this.toIgnoreMetadataFields = toIgnoreMetadataFields; } - public DiscoveryConfiguration getDiscoveryConfiguration(final Context context, - IndexableObject dso) { + /** + * Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from + * the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will + * be done to look for the unique index ID of the IndexableObject. When the IndexableObject is null, the default + * configuration will be retrieved + * + * When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param indexableObject - The IndexableObject to retrieve the configuration for + * @return the discovery configuration for the provided IndexableObject. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) { String name; - if (dso == null) { - name = "default"; - } else if (dso instanceof IndexableDSpaceObject) { - return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) dso).getIndexedObject()); + if (indexableObject == null) { + return getDiscoveryConfiguration(null); + } else if (indexableObject instanceof IndexableDSpaceObject) { + return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject()); } else { - name = dso.getUniqueIndexID(); + name = indexableObject.getUniqueIndexID(); } return getDiscoveryConfiguration(name); } @@ -77,16 +91,15 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final Context context, * @param dso - The DSpace object to retrieve the configuration for * @return the discovery configuration for the provided DSO. 
*/ - public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, - DSpaceObject dso) { + public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { // Fall back to default configuration if (dso == null) { - return getDiscoveryConfiguration("default", false); + return getDiscoveryConfiguration(null, true); } // Attempt to retrieve cached configuration by UUID - if (uuidMap.containsKey(dso.getID())) { - return uuidMap.get(dso.getID()); + if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) { + return comColToDiscoveryConfigurationMap.get(dso.getID()); } DiscoveryConfiguration configuration; @@ -107,13 +120,21 @@ public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context configuration = getDiscoveryDSOConfiguration(context, parentObject); } - // Cache the resulting configuration - uuidMap.put(dso.getID(), configuration); + // Cache the resulting configuration when the DSO is a Community or Collection + if (dso instanceof Community || dso instanceof Collection) { + comColToDiscoveryConfigurationMap.put(dso.getID(), configuration); + } return configuration; } - public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { + /** + * Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the + * default configuration will be returned. + * @param name - The name of the configuration to be retrieved + * @return the Discovery Configuration for the provided name, or default when none was found. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(String name) { return getDiscoveryConfiguration(name, true); } @@ -138,13 +159,23 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boole return result; } - public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final Context context, - final String configurationName, - final IndexableObject dso) { + /** + * Retrieve the Discovery configuration for the provided name or IndexableObject. The configuration will first be + * checked for the provided name. 
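To make the resolution and caching behaviour described in these javadocs concrete, the following is a minimal illustrative sketch, not part of the change set: it assumes a Spring-injected DiscoveryConfigurationService, a Collection that has no discovery configuration of its own, and the usual org.dspace.core.Context / org.dspace.content.Collection imports in scope.

    private DiscoveryConfiguration resolveForCollection(Context context, Collection collection,
            DiscoveryConfigurationService configurationService) {
        // Nothing matches the collection itself, so the service walks up to the owning
        // community and, failing that, falls back to the "default" configuration.
        DiscoveryConfiguration configuration =
                configurationService.getDiscoveryDSOConfiguration(context, collection);
        // Because the DSO is a Collection, the result is now cached under its UUID in
        // comColToDiscoveryConfigurationMap; repeating the call is a plain map lookup
        // rather than another walk up the Community/Collection hierarchy.
        return configuration;
    }

Items are deliberately left out of the cache, so an item-level lookup always re-resolves through its owning collection.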
When no match is found for the name, the configuration will be retrieved for the + * IndexableObject + * + * @param context - The database context + * @param configurationName - The name of the configuration to be retrieved + * @param indexableObject - The indexable object to retrieve the configuration for + * @return the Discovery configuration for the provided name, or when not found for the provided IndexableObject + */ + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrIndexableObject(Context context, + String configurationName, + IndexableObject indexableObject) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); } else { - return getDiscoveryConfiguration(context, dso); + return getDiscoveryConfiguration(context, indexableObject); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java index ebedfc34b743..04c5e7d43206 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java @@ -79,7 +79,7 @@ public void storeInprogressItemFields(Context context, SolrInputDocument doc, discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkspaceItem) inProgressSubmission); } else { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); } indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index b417237f763e..412442fb1c90 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -147,7 +147,7 @@ public SolrInputDocument buildDocument(Context context, IndexableItem indexableI } // Add the item metadata - List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); + List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); //mandatory facet to show status on mydspace diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java index e337e76ef253..46c8ab3e394f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java @@ -84,7 +84,7 @@ public SearchConfigurationRest getSearchConfiguration(final String dsoScope, fin IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + 
.getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); } @@ -96,7 +96,7 @@ public SearchResultsRest getSearchObjects(final String query, final List Context context = obtainContext(); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; @@ -121,7 +121,7 @@ public FacetConfigurationRest getFacetsConfiguration(final String dsoScope, fina IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); } @@ -138,7 +138,7 @@ public FacetResultsRest getFacetObjects(String facetName, String prefix, String IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); @@ -157,7 +157,7 @@ public SearchResultsRest getAllFacets(String query, List dsoTypes, Strin Pageable page = PageRequest.of(1, 1); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml deleted file mode 100644 index e029c65aa000..000000000000 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml +++ /dev/null @@ -1,3067 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - dc.rights - - - - - - - - - - - - - - - dc.rights - - - - - - - - dc.description.provenance - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - dc.title - dc.contributor.author - dc.creator - dc.subject - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - search.resourcetype:Item - - withdrawn:true OR discoverable:false - - - - - - - - - - - - - - - - - - - - - - - - - - - 
[... remainder of the deleted 3,067-line discovery.xml: the Spring bean markup was lost in extraction; the surviving fragments list the default filter queries (search.resourcetype:*, entityType_keyword:*), the recurring search/sort fields dc.title, dc.contributor.author, dc.creator and dc.subject, and the facet metadata fields person.*, organization.*, creativework.*, relation.* and dc.test.* ...]
- - - - - - - relation.isPublicationOfJournalIssue - - - - - - - - - - - publicationVolume.volumeNumber - - - - - - - - - - - relation.isIssueOfJournalVolume - - - - - - - - - - - relation.isJournalOfVolume - - - - - - - - - - - creativework.publisher - - - - - - - - - - - - - - - creativework.editor - - - - - - - - - - - - - - - relation.isVolumeOfJournal - - - - - - - - - - - - - - placeholder.placeholder.placeholder - - - - - - - - - - relation.isOrgUnitOfProject - - - - - - - - - - - - relation.isPersonOfProject - - - - - - - - - - - - relation.isPublicationOfProject - - - - - - - - - - - relation.isContributorOfPublication - - - - - - - - - - - relation.isPublicationOfContributor - - - - - - - - - - - relation.isFundingAgencyOfProject - - - - - - - - - - - relation.isProjectOfFundingAgency - - - - - - - - - - - dc.test.parentcommunity1field - - - - - - - - - - - - - - - dc.test.subcommunity11field - - - - - - - - - - - - - - - dc.test.collection111field - - - - - - - - - - - - - - - dc.test.collection121field - - - - - - - - - - - - - - - dc.test.subcommunity21field - - - - - - - - - - - - - - dc.test.collection211field - - - - - - - - - - - - - - dc.test.collection221field - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java index 15c1019584b1..0c8735545e8b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -272,7 +272,7 @@ public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) ); getClient().perform(get("/api/discover/facets/parentcommunity1field") @@ -310,7 +310,7 @@ public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("subcommunity11field", "text", false))) + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) ); getClient().perform(get("/api/discover/facets/subcommunity11field") @@ -339,7 +339,7 @@ public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("collection111field", "text", false))) + FacetEntryMatcher.matchFacet(false, "collection111field", "text"))) ); getClient().perform(get("/api/discover/facets/collection111field") @@ -366,7 +366,7 @@ public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( 
FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("subcommunity11field", "text", false))) + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) ); getClient().perform(get("/api/discover/facets/subcommunity11field") @@ -391,7 +391,7 @@ public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) ); getClient().perform(get("/api/discover/facets/parentcommunity1field") @@ -420,7 +420,7 @@ public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("collection121field", "text", false))) + FacetEntryMatcher.matchFacet(false, "collection121field", "text"))) ); getClient().perform(get("/api/discover/facets/collection121field") @@ -445,7 +445,7 @@ public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) ); getClient().perform(get("/api/discover/facets/parentcommunity1field") @@ -490,7 +490,7 @@ public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("subcommunity21field", "text", false))) + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) ); getClient().perform(get("/api/discover/facets/subcommunity21field") @@ -519,7 +519,7 @@ public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("collection211field", "text", false))) + FacetEntryMatcher.matchFacet(false, "collection211field", "text"))) ); getClient().perform(get("/api/discover/facets/collection211field") @@ -546,7 +546,7 @@ public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("subcommunity21field", "text", false))) + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) ); getClient().perform(get("/api/discover/facets/subcommunity21field") @@ -588,7 +588,7 @@ public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("collection221field", 
"text", false))) + FacetEntryMatcher.matchFacet(false, "collection221field", "text"))) ); getClient().perform(get("/api/discover/facets/collection221field") diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java index 34b7b8b30d6a..60b5f417ed1e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java @@ -99,7 +99,7 @@ public static Matcher hasContentInOriginalBundleFacet(boolean ha ); } - public static Matcher matchFacet(String name, String facetType, boolean hasNext) { + public static Matcher matchFacet(boolean hasNext, String name, String facetType) { return allOf( hasJsonPath("$.name", is(name)), hasJsonPath("$.facetType", is(facetType)), From e433720cd005ddf6b6e13ce09988c72500a74115 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Wed, 5 Apr 2023 14:07:52 +0200 Subject: [PATCH 103/686] Add test-discovery xml --- .../config/spring/api/test-discovery.xml | 1115 +++++++++++++++++ 1 file changed, 1115 insertions(+) create mode 100644 dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml new file mode 100644 index 000000000000..8b11a87e2d0b --- /dev/null +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -0,0 +1,1115 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.rights + + + + + + + + + + + + + + + dc.rights + + + + + + + + dc.description.provenance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR 
search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.test.parentcommunity1field + + + + + + + + + + + + + + + dc.test.subcommunity11field + + + + + + + + + + + + + + + dc.test.collection111field + + + + + + + + + + + + + + + dc.test.collection121field + + + + + + + + + + + + + + + dc.test.subcommunity21field + + + + + + + + + + + + + + dc.test.collection211field + + + + + + + + + + + + + + dc.test.collection221field + + + + + + + + + + + From cf831ed7d5892fb86b1569b8c02c406e546e2e8e Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Fri, 7 Apr 2023 16:26:12 +0200 Subject: [PATCH 104/686] Fix merge issues --- dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java | 2 +- .../data/dspaceFolder/config/spring/api/test-discovery.xml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java index e02367f6eb8e..f99aab852bf5 100644 --- a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java @@ -239,7 +239,7 @@ private void addLocationScopeFilter(DiscoverQuery query) { } private void addDefaultFilterQueries(DiscoverQuery query) { - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container); discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries); } diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml index 8b11a87e2d0b..4a91ef051e88 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -39,10 +39,13 @@ + + + From d9cb1e51c48fe62f266276551882dc5408e423e5 Mon Sep 17 00:00:00 2001 From: TS Date: Tue, 11 Apr 2023 14:41:17 -0300 Subject: [PATCH 105/686] Typo fix in dspace.cfg Fixed typo "dermine" to "determine" in `dspace/config/dspace.cfg` file --- dspace/config/dspace.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 5d69d9295bf1..8b7e46cd48ad 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -843,7 +843,7 @@ plugin.single.org.dspace.embargo.EmbargoSetter = org.dspace.embargo.DefaultEmbar plugin.single.org.dspace.embargo.EmbargoLifter = 
org.dspace.embargo.DefaultEmbargoLifter # values for the forever embargo date threshold -# This threshold date is used in the default access status helper to dermine if an item is +# This threshold date is used in the default access status helper to determine if an item is # restricted or embargoed based on the start date of the primary (or first) file policies. # In this case, if the policy start date is inferior to the threshold date, the status will # be embargo, else it will be restricted. From 1c41794ddd42169f7d5d2bdb674f052135da11ac Mon Sep 17 00:00:00 2001 From: TS Date: Tue, 11 Apr 2023 15:02:24 -0300 Subject: [PATCH 106/686] Fix typo in dspace.cfg Fixed typo "cummulative" to "cumulative" in dspace/config/dspace.cfg file --- dspace/config/dspace.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 8b7e46cd48ad..65b1f951fad3 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -880,7 +880,7 @@ org.dspace.app.itemexport.life.span.hours = 48 # The maximum size in Megabytes the export should be. This is enforced before the # compression. Each bitstream's size in each item being exported is added up, if their -# cummulative sizes are more than this entry the export is not kicked off +# cumulative sizes are more than this entry the export is not kicked off org.dspace.app.itemexport.max.size = 200 ### Batch Item import settings ### From 9ef37940905154e5f838fbe559c9588617060e7f Mon Sep 17 00:00:00 2001 From: corrado lombardi Date: Wed, 12 Apr 2023 15:29:52 +0200 Subject: [PATCH 107/686] updated checksum calculation --- .../java/org/dspace/storage/bitstore/S3BitStoreService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index d2c9839905cd..bf46687ee97e 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -359,7 +359,7 @@ public Map about(Bitstream bitstream, List attrs) throws if (attrs.contains("checksum")) { try (InputStream in = get(bitstream)) { byte[] md5Digest = MessageDigest.getInstance(CSA).digest(IOUtils.toByteArray(in)); - metadata.put("checksum", Base64.encodeBase64String(md5Digest)); + metadata.put("checksum", Utils.toHex(md5Digest)); } catch (NoSuchAlgorithmException nsae) { // Should never happen log.warn("Caught NoSuchAlgorithmException", nsae); From d8357d77aa4cf329c6c8aa7f1b7aa09e1de65359 Mon Sep 17 00:00:00 2001 From: corrado lombardi Date: Wed, 12 Apr 2023 18:45:27 +0200 Subject: [PATCH 108/686] used utils.copy to copy input stream content in order to prevent oom problems --- .../org/dspace/storage/bitstore/S3BitStoreService.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index bf46687ee97e..7a09dd2e76df 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -44,8 +44,7 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.io.IOUtils; 
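This commit replaces the byte-array digest in S3BitStoreService.about() with a streaming digest (the hunk continues just below), which is what prevents the OutOfMemoryError the commit message refers to. A self-contained sketch of the same pattern, using only the JDK plus commons-io and commons-codec; the file name and the MD5 algorithm are assumptions for the example, not taken from the patch:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.DigestInputStream;
    import java.security.MessageDigest;

    import org.apache.commons.codec.binary.Hex;
    import org.apache.commons.io.IOUtils;
    import org.apache.commons.io.output.NullOutputStream;

    public class StreamingChecksumSketch {
        public static void main(String[] args) throws Exception {
            // Wrap the source stream in a DigestInputStream so the digest is updated
            // as bytes flow through; the bytes themselves are discarded, so memory
            // use stays constant regardless of the size of the stream.
            try (InputStream in = Files.newInputStream(Paths.get("example.bin"));
                 DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance("MD5"))) {
                IOUtils.copy(dis, NullOutputStream.NULL_OUTPUT_STREAM);
                // Print the digest as lowercase hex; the patched code builds the same
                // kind of string with Utils.toHex(...).
                System.out.println(Hex.encodeHexString(dis.getMessageDigest().digest()));
            }
        }
    }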
+import org.apache.commons.io.output.NullOutputStream; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; @@ -357,8 +356,11 @@ public Map about(Bitstream bitstream, List attrs) throws putValueIfExistsKey(attrs, metadata, "checksum_algorithm", CSA); if (attrs.contains("checksum")) { - try (InputStream in = get(bitstream)) { - byte[] md5Digest = MessageDigest.getInstance(CSA).digest(IOUtils.toByteArray(in)); + try (InputStream in = get(bitstream); + DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)) + ) { + Utils.copy(dis, NullOutputStream.NULL_OUTPUT_STREAM); + byte[] md5Digest = dis.getMessageDigest().digest(); metadata.put("checksum", Utils.toHex(md5Digest)); } catch (NoSuchAlgorithmException nsae) { // Should never happen From 4685c9fc8d362cd2668d70cdb335bba00a32e432 Mon Sep 17 00:00:00 2001 From: corrado lombardi Date: Wed, 12 Apr 2023 19:20:40 +0200 Subject: [PATCH 109/686] updated test expectation --- .../java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java index 7e14f82be289..60793ac610bd 100644 --- a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java @@ -242,7 +242,7 @@ public void testAbout() throws IOException { assertThat(about, hasEntry(is("modified"), notNullValue())); assertThat(about.size(), is(2)); - String expectedChecksum = Base64.encodeBase64String(generateChecksum(content)); + String expectedChecksum = Utils.toHex(generateChecksum(content)); about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum")); assertThat(about, hasEntry("size_bytes", 22L)); From 77d552b02997ee4c36dc277f23b54880a3605ee9 Mon Sep 17 00:00:00 2001 From: Chandaluri Vamsi Krishna Date: Thu, 13 Apr 2023 09:50:57 +0530 Subject: [PATCH 110/686] Added logic for default description Signed-off-by: Chandaluri Vamsi Krishna --- .../org/dspace/app/util/SyndicationFeed.java | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java index 8f155b63307d..5b0520a3e1d1 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java @@ -91,6 +91,7 @@ public class SyndicationFeed { // default DC fields for entry protected String defaultTitleField = "dc.title"; + protected String defaultDescriptionField = "dc.description"; protected String defaultAuthorField = "dc.contributor.author"; protected String defaultDateField = "dc.date.issued"; private static final String[] defaultDescriptionFields = @@ -196,15 +197,15 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec // dso is null for the whole site, or a search without scope if (dso == null) { defaultTitle = configurationService.getProperty("dspace.name"); - feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION)); + defaultDescriptionField = localize(labels, MSG_FEED_DESCRIPTION); objectURL = resolveURL(request, null); } else { Bitstream logo = null; if (dso instanceof IndexableCollection) { Collection col = 
((IndexableCollection) dso).getIndexedObject(); defaultTitle = col.getName(); - feed.setDescription(collectionService.getMetadataFirstValue(col, - CollectionService.MD_SHORT_DESCRIPTION, Item.ANY)); + defaultDescriptionField = collectionService.getMetadataFirstValue(col, + CollectionService.MD_SHORT_DESCRIPTION, Item.ANY); logo = col.getLogo(); String cols = configurationService.getProperty("webui.feed.podcast.collections"); if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) { @@ -214,8 +215,8 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec } else if (dso instanceof IndexableCommunity) { Community comm = ((IndexableCommunity) dso).getIndexedObject(); defaultTitle = comm.getName(); - feed.setDescription(communityService.getMetadataFirstValue(comm, - CommunityService.MD_SHORT_DESCRIPTION, Item.ANY)); + defaultDescriptionField = communityService.getMetadataFirstValue(comm, + CommunityService.MD_SHORT_DESCRIPTION, Item.ANY); logo = comm.getLogo(); String comms = configurationService.getProperty("webui.feed.podcast.communities"); if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) { @@ -230,6 +231,12 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec } feed.setTitle(labels.containsKey(MSG_FEED_TITLE) ? localize(labels, MSG_FEED_TITLE) : defaultTitle); + + if (defaultDescriptionField == null || defaultDescriptionField == "") { + defaultDescriptionField = "No Description"; + } + + feed.setDescription(defaultDescriptionField); feed.setLink(objectURL); feed.setPublishedDate(new Date()); feed.setUri(objectURL); From 9a345d3e6426bbd8d2b70e02586447cbe2439cff Mon Sep 17 00:00:00 2001 From: Chandaluri Vamsi Krishna Date: Thu, 13 Apr 2023 09:52:14 +0530 Subject: [PATCH 111/686] Testing of default description Signed-off-by: Chandaluri Vamsi Krishna --- .../opensearch/OpenSearchControllerIT.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java index ac03e946e320..1ddea619d2fc 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java @@ -249,4 +249,24 @@ public void serviceDocumentTest() throws Exception { */ } + + @Test + public void emptyDescriptionTest() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection collection1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1") + .build(); + + getClient().perform(get("/opensearch/search") + .param("format", "rss") + .param("scope", collection1.getID().toString()) + .param("query", "*")) + .andExpect(status().isOk()) + .andExpect(xpath("rss/channel/description").string("No Description")); + } } From b2b4dda243b4dc4689e8d727a7f933ba6d0d603c Mon Sep 17 00:00:00 2001 From: corrado lombardi Date: Thu, 13 Apr 2023 08:35:38 +0200 Subject: [PATCH 112/686] checkstyle fix --- .../java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java | 1 - 1 file changed, 1 deletion(-) diff --git 
a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java index 60793ac610bd..7aae1cf2719c 100644 --- a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java @@ -40,7 +40,6 @@ import com.amazonaws.services.s3.model.Bucket; import com.amazonaws.services.s3.model.ObjectMetadata; import io.findify.s3mock.S3Mock; -import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.dspace.AbstractIntegrationTestWithDatabase; From fe5a3052648f22b22e0cd1da86cb8e34293978ea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 14 Apr 2023 17:40:58 +0000 Subject: [PATCH 113/686] Bump json from 20180130 to 20230227 in /dspace-api Bumps [json](https://github.com/douglascrockford/JSON-java) from 20180130 to 20230227. - [Release notes](https://github.com/douglascrockford/JSON-java/releases) - [Changelog](https://github.com/stleary/JSON-java/blob/master/docs/RELEASES.md) - [Commits](https://github.com/douglascrockford/JSON-java/commits) --- updated-dependencies: - dependency-name: org.json:json dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- dspace-api/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index d33387859fb3..c1811a9d63a1 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -776,7 +776,7 @@ org.json json - 20180130 + 20230227 From e00388995bc2911a83c150dd2a642a4616431a47 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 14 Apr 2023 16:45:15 -0500 Subject: [PATCH 114/686] Split Codecov upload into a separate job. Make it auto-retry up to five times --- .github/workflows/build.yml | 37 +++++++++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 52714a8ba28f..99c9efe0190f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -79,6 +79,39 @@ jobs: name: ${{ matrix.type }} results path: ${{ matrix.resultsdir }} - # https://github.com/codecov/codecov-action + # Upload code coverage report to artifact, so that it can be shared with the 'codecov' job (see below) + - name: Upload code coverage report to Artifact + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.type }} coverage report + path: 'dspace/target/site/jacoco-aggregate/jacoco.xml' + retention-days: 14 + + # Codecov upload is a separate job in order to allow us to restart this separate from the entire build/test + # job above. This is necessary because Codecov uploads seem to randomly fail at times. + # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954 + codecov: + # Must run after 'tests' job above + needs: tests + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + # Download artifacts from previous 'tests' job + - name: Download coverage artifacts + uses: actions/download-artifact@v3 + + # Now attempt upload to Codecov using its action. + # NOTE: We use a retry action to retry the Codecov upload if it fails the first time. 
+ # + # Retry action: https://github.com/marketplace/actions/retry-action + # Codecov action: https://github.com/codecov/codecov-action - name: Upload coverage to Codecov.io - uses: codecov/codecov-action@v3 + uses: Wandalen/wretry.action@v1.0.36 + with: + action: codecov/codecov-action@v3 + # Try upload 5 times max + attempt_limit: 5 + # Run again in 30 seconds + attempt_delay: 30000 From 9b0f2ed43332d4fd91c0825ed199c570c6fbc816 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 14 Apr 2023 16:45:52 -0500 Subject: [PATCH 115/686] Update to latest version of actions --- .github/workflows/issue_opened.yml | 2 +- .github/workflows/label_merge_conflicts.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/issue_opened.yml b/.github/workflows/issue_opened.yml index 5d7c1c30f7d3..b4436dca3aad 100644 --- a/.github/workflows/issue_opened.yml +++ b/.github/workflows/issue_opened.yml @@ -16,7 +16,7 @@ jobs: # Only add to project board if issue is flagged as "needs triage" or has no labels # NOTE: By default we flag new issues as "needs triage" in our issue template if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '') - uses: actions/add-to-project@v0.3.0 + uses: actions/add-to-project@v0.5.0 # Note, the authentication token below is an ORG level Secret. # It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token diff --git a/.github/workflows/label_merge_conflicts.yml b/.github/workflows/label_merge_conflicts.yml index d71d244c2b02..cc0c7099f40e 100644 --- a/.github/workflows/label_merge_conflicts.yml +++ b/.github/workflows/label_merge_conflicts.yml @@ -23,7 +23,7 @@ jobs: steps: # See: https://github.com/prince-chrismc/label-merge-conflicts-action - name: Auto-label PRs with merge conflicts - uses: prince-chrismc/label-merge-conflicts-action@v2 + uses: prince-chrismc/label-merge-conflicts-action@v3 # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved. 
# Note, the authentication token is created automatically # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token From 1ccde8468debf2e0db662a72efc5300b444a3b55 Mon Sep 17 00:00:00 2001 From: Chandaluri Vamsi Krishna Date: Sat, 15 Apr 2023 11:42:02 +0530 Subject: [PATCH 116/686] Removed useProxies usage Signed-off-by: Chandaluri Vamsi Krishna --- .../java/org/dspace/authenticate/IPAuthentication.java | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java index 9c37fcee4755..3b2366034489 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java @@ -52,11 +52,6 @@ public class IPAuthentication implements AuthenticationMethod { */ private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class); - /** - * Whether to look for x-forwarded headers for logging IP addresses - */ - protected static Boolean useProxies; - /** * All the IP matchers */ @@ -250,7 +245,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) log.debug(LogHelper.getHeader(context, "authenticated", "special_groups=" + gsb.toString() - + " (by IP=" + addr + ", useProxies=" + useProxies.toString() + ")" + + " (by IP=" + addr + ")" )); } From d5178a71290c33d6eb518c63540b63cd5c1702fe Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Mon, 17 Apr 2023 09:26:49 -0400 Subject: [PATCH 117/686] Document the need to avoid commit() during event dispatch. --- .../main/java/org/dspace/event/Consumer.java | 31 ++++++++++--------- .../java/org/dspace/event/package-info.java | 20 ++++++++++++ 2 files changed, 37 insertions(+), 14 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/event/package-info.java diff --git a/dspace-api/src/main/java/org/dspace/event/Consumer.java b/dspace-api/src/main/java/org/dspace/event/Consumer.java index 1a8b16e98a0b..f56efcc7bacb 100644 --- a/dspace-api/src/main/java/org/dspace/event/Consumer.java +++ b/dspace-api/src/main/java/org/dspace/event/Consumer.java @@ -10,18 +10,16 @@ import org.dspace.core.Context; /** - * Interface for content event consumers. Note that the consumer cannot tell if - * it is invoked synchronously or asynchronously; the consumer interface and - * sequence of calls is the same for both. Asynchronous consumers may see more - * consume() calls between the start and end of the event stream, if they are - * invoked asynchronously, once in a long time period, rather than synchronously - * after every Context.commit(). - * - * @version $Revision$ + * Interface for content event consumers. Note that the consumer cannot tell + * if it is invoked synchronously or asynchronously; the consumer interface + * and sequence of calls is the same for both. Asynchronous consumers may see + * more consume() calls between the start and end of the event stream, if they + * are invoked asynchronously, once in a long time period, rather than + * synchronously after every Context.commit(). */ public interface Consumer { /** - * Initialize - allocate any resources required to operate. This may include + * Allocate any resources required to operate. This may include * initializing any pooled JMS resources. Called ONCE when created by the * dispatcher pool. 
This should be used to set up expensive resources that * will remain for the lifetime of the consumer. @@ -31,12 +29,17 @@ public interface Consumer { public void initialize() throws Exception; /** - * Consume an event; events may get filtered at the dispatcher level, hiding - * it from the consumer. This behavior is based on the dispatcher/consumer - * configuration. Should include logic to initialize any resources required - * for a batch of events. + * Consume an event. Events may be filtered by a dispatcher, hiding them + * from the consumer. This behavior is based on the dispatcher/consumer + * configuration. Should include logic to initialize any resources + * required for a batch of events. + * + *

This method must not commit the context. Committing causes + * re-dispatch of the event queue, which can result in infinite recursion + * leading to memory exhaustion as seen in + * {@link https://github.com/DSpace/DSpace/pull/8756}. * - * @param ctx the execution context object + * @param ctx the current DSpace session * @param event the content event * @throws Exception if error */ diff --git a/dspace-api/src/main/java/org/dspace/event/package-info.java b/dspace-api/src/main/java/org/dspace/event/package-info.java new file mode 100644 index 000000000000..544dfb271a1d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/event/package-info.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +/** + * Actions which alter DSpace model objects can queue {@link Event}s, which + * are presented to {@link Consumer}s by a {@link Dispatcher}. A pool of + * {@code Dispatcher}s is managed by an {@link service.EventService}, guided + * by configuration properties {@code event.dispatcher.*}. + * + *
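As an illustration of the rule spelled out above, here is a sketch, not part of the patch, of a consumer that respects it: consume() only records what changed and defers any follow-up work to the end of dispatch. The class name, the recorded-ID set, and the use of Event.getSubjectID(), end(Context) and finish(Context) are assumptions for the example rather than something introduced by this change.

    import java.util.HashSet;
    import java.util.Set;
    import java.util.UUID;

    import org.dspace.core.Context;
    import org.dspace.event.Consumer;
    import org.dspace.event.Event;

    public class RecordingConsumer implements Consumer {

        // IDs of the objects seen during the current dispatch round.
        private final Set<UUID> touched = new HashSet<>();

        @Override
        public void initialize() throws Exception {
            // One-time, potentially expensive setup would go here.
        }

        @Override
        public void consume(Context ctx, Event event) throws Exception {
            // Record only; never call ctx.commit() from inside dispatch.
            if (event.getSubjectID() != null) {
                touched.add(event.getSubjectID());
            }
        }

        @Override
        public void end(Context ctx) throws Exception {
            // Dispatch is over: act on the collected IDs here, then reset.
            touched.clear();
        }

        @Override
        public void finish(Context ctx) throws Exception {
            // Nothing to release in this sketch.
        }
    }

Deferring the real work this way keeps commit() out of the dispatch loop, so the event queue is never re-dispatched from inside itself.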

One must be careful not to commit the current DSpace {@code Context} + * during event dispatch. {@code commit()} triggers event dispatching, and + * doing this during event dispatch can lead to infinite recursion and + * memory exhaustion. + */ + +package org.dspace.event; From b48ea37de5d2b87d550099c41526cdbbf2969831 Mon Sep 17 00:00:00 2001 From: Tuan Nguyen Date: Mon, 17 Apr 2023 15:16:17 -0400 Subject: [PATCH 118/686] properly escape ':' in query --- .../java/org/dspace/content/authority/SolrAuthority.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java index 497fa08f2faf..123626cd0965 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java @@ -200,8 +200,8 @@ protected void addExternalResults(String text, ArrayList choices, List Date: Wed, 5 Apr 2023 17:48:55 +0200 Subject: [PATCH 119/686] 100414: Missing search_result statistics fix --- .../rest/converter/SearchEventConverter.java | 10 +++ .../app/rest/model/SearchEventRest.java | 9 +++ .../app/rest/utils/DSpaceObjectResolver.java | 61 +++++++++++++++++++ .../app/rest/SearchEventRestRepositoryIT.java | 55 +++++++++++++++++ 4 files changed, 135 insertions(+) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java index 470a3ac3425b..a8203e272f29 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java @@ -14,6 +14,7 @@ import org.dspace.app.rest.model.PageRest; import org.dspace.app.rest.model.SearchEventRest; import org.dspace.app.rest.model.SearchResultsRest; +import org.dspace.app.rest.utils.DSpaceObjectResolver; import org.dspace.app.rest.utils.ScopeResolver; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; @@ -29,11 +30,20 @@ public class SearchEventConverter { @Autowired private ScopeResolver scopeResolver; + @Autowired + private DSpaceObjectResolver dSpaceObjectResolver; + public UsageSearchEvent convert(Context context, HttpServletRequest request, SearchEventRest searchEventRest) { UsageSearchEvent usageSearchEvent = new UsageSearchEvent(UsageEvent.Action.SEARCH, request, context, null); usageSearchEvent.setQuery(searchEventRest.getQuery()); usageSearchEvent.setDsoType(searchEventRest.getDsoType()); + if (searchEventRest.getObject() != null) { + IndexableObject object = dSpaceObjectResolver.resolveObject(context, searchEventRest.getObject()); + if (object != null && object.getIndexedObject() instanceof DSpaceObject) { + usageSearchEvent.setObject((DSpaceObject) object.getIndexedObject()); + } + } if (searchEventRest.getScope() != null) { IndexableObject scopeObject = scopeResolver.resolveScope(context, String.valueOf(searchEventRest.getScope())); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java index e029dbaf9919..637acb9bfd68 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java @@ -25,6 +25,7 @@ public class SearchEventRest extends BaseObjectRest { private UUID scope; private String configuration; private String dsoType; + private UUID object; private List appliedFilters; private SearchResultsRest.Sorting sort; private PageRest page; @@ -97,4 +98,12 @@ public String getDsoType() { public void setDsoType(String dsoType) { this.dsoType = dsoType; } + + public UUID getObject() { + return object; + } + + public void setObject(UUID object) { + this.object = object; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java new file mode 100644 index 000000000000..7ded06bdf52a --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.utils; + +import java.sql.SQLException; +import java.util.UUID; + +import org.apache.log4j.Logger; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.indexobject.IndexableItem; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class DSpaceObjectResolver { + /* Log4j logger */ + private static final Logger log = Logger.getLogger(DSpaceObjectResolver.class); + + @Autowired + ItemService itemService; + + @Autowired + CollectionService collectionService; + + @Autowired + CommunityService communityService; + + public IndexableObject resolveObject(Context context, UUID uuid) { + IndexableObject object = null; + if (uuid != null) { + try { + object = new IndexableCommunity(communityService.find(context, uuid)); + if (object.getIndexedObject() == null) { + object = new IndexableCollection(collectionService.find(context, uuid)); + } + if (object.getIndexedObject() == null) { + object = new IndexableItem(itemService.find(context, uuid)); + } + if (object.getIndexedObject() == null) { + throw new IllegalArgumentException("UUID " + uuid + " is expected to resolve to a Community, " + + "Collection or Item, but didn't resolve to any"); + } + } catch (SQLException e) { + log.warn("Unable to retrieve DSpace Object with ID " + uuid + " from the database", e); + } + } + return object; + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java index bd40cfdc9dd8..63ca4b735dae 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java @@ -411,4 +411,59 @@ public void postTestSuccesEmptyQuery() throws Exception { .andExpect(status().isCreated()); } + + @Test + 
public void postTestWithObjectSuccess() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(publicItem1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setObject(publicItem1.getID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + } } From e3fb8bf601177e6cfff43b44f59cf14e630d2bb1 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 19 Apr 2023 11:21:13 -0500 Subject: [PATCH 120/686] Update to latest Jetty v9 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 3d9e6851d1d0..3b6f7edb6549 100644 --- a/pom.xml +++ b/pom.xml @@ -37,7 +37,7 @@ 2.3.1 1.1.0 - 9.4.48.v20220622 + 9.4.51.v20230217 2.17.1 2.0.27 1.18.0 From aa7acb5be782d1eafdc5ac4fa6bb2c6de7796b38 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 19 Apr 2023 14:25:18 -0500 Subject: [PATCH 121/686] Update Postgres Docker scripts to build/tag images automatically --- .github/workflows/docker.yml | 57 +++++++++++++++++++ docker-compose.yml | 10 +++- .../src/main/docker-compose/db.entities.yml | 2 +- .../dspace-postgres-pgcrypto-curl/Dockerfile | 16 ++++-- .../dspace-postgres-pgcrypto/Dockerfile | 14 +++-- 5 files changed, 85 insertions(+), 14 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index faa50ac8dcc2..971954a5e1ee 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -196,3 +196,60 @@ jobs: # Use tags / labels provided by 'docker/metadata-action' above tags: ${{ steps.meta_build_solr.outputs.tags }} labels: ${{ steps.meta_build_solr.outputs.labels }} + + ########################################################### + # Build/Push the 'dspace/dspace-postgres-pgcrypto' image + ########################################################### + # Get Metadata for docker_build_postgres step below 
+ - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image + id: meta_build_postgres + uses: docker/metadata-action@v4 + with: + images: dspace/dspace-postgres-pgcrypto + tags: ${{ env.IMAGE_TAGS }} + flavor: ${{ env.TAGS_FLAVOR }} + + - name: Build and push 'dspace-postgres-pgcrypto' image + id: docker_build_postgres + uses: docker/build-push-action@v3 + with: + # Must build out of subdirectory to have access to install script for pgcrypto + context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ + dockerfile: Dockerfile + platforms: ${{ env.PLATFORMS }} + # For pull requests, we run the Docker build (to ensure no PR changes break the build), + # but we ONLY do an image push to DockerHub if it's NOT a PR + push: ${{ github.event_name != 'pull_request' }} + # Use tags / labels provided by 'docker/metadata-action' above + tags: ${{ steps.meta_build_postgres.outputs.tags }} + labels: ${{ steps.meta_build_postgres.outputs.labels }} + + ########################################################### + # Build/Push the 'dspace/dspace-postgres-pgcrypto' image ('-loadsql' tag) + ########################################################### + # Get Metadata for docker_build_postgres_loadsql step below + - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image + id: meta_build_postgres_loadsql + uses: docker/metadata-action@v4 + with: + images: dspace/dspace-postgres-pgcrypto + tags: ${{ env.IMAGE_TAGS }} + # Suffix all tags with "-loadsql". Otherwise, it uses the same + # tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above. + flavor: ${{ env.TAGS_FLAVOR }} + suffix=-loadsql + + - name: Build and push 'dspace-postgres-pgcrypto-loadsql' image + id: docker_build_postgres_loadsql + uses: docker/build-push-action@v3 + with: + # Must build out of subdirectory to have access to install script for pgcrypto + context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/ + dockerfile: Dockerfile + platforms: ${{ env.PLATFORMS }} + # For pull requests, we run the Docker build (to ensure no PR changes break the build), + # but we ONLY do an image push to DockerHub if it's NOT a PR + push: ${{ github.event_name != 'pull_request' }} + # Use tags / labels provided by 'docker/metadata-action' above + tags: ${{ steps.meta_build_postgres_loadsql.outputs.tags }} + labels: ${{ steps.meta_build_postgres_loadsql.outputs.labels }} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 40b4ce064eab..dd26d6cf150b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -62,13 +62,16 @@ services: while (! 
/dev/null 2>&1; do sleep 1; done; /dspace/bin/dspace database migrate catalina.sh run - # DSpace database container + # DSpace PostgreSQL database container dspacedb: container_name: dspacedb + # Uses a custom Postgres image with pgcrypto installed + image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-dspace-7_x}" + build: + # Must build out of subdirectory to have access to install script for pgcrypto + context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ environment: PGDATA: /pgdata - # Uses a custom Postgres image with pgcrypto installed - image: dspace/dspace-postgres-pgcrypto networks: dspacenet: ports: @@ -77,6 +80,7 @@ services: stdin_open: true tty: true volumes: + # Keep Postgres data directory between reboots - pgdata:/pgdata # DSpace Solr container dspacesolr: diff --git a/dspace/src/main/docker-compose/db.entities.yml b/dspace/src/main/docker-compose/db.entities.yml index 8d86f7bb8359..32c54a5d0bd1 100644 --- a/dspace/src/main/docker-compose/db.entities.yml +++ b/dspace/src/main/docker-compose/db.entities.yml @@ -10,7 +10,7 @@ version: "3.7" services: dspacedb: - image: dspace/dspace-postgres-pgcrypto:loadsql + image: dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql environment: # This SQL is available from https://github.com/DSpace-Labs/AIP-Files/releases/tag/demo-entities-data - LOADSQL=https://github.com/DSpace-Labs/AIP-Files/releases/download/demo-entities-data/dspace7-entities-data.sql diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile index 0e85dd33ce59..1f51d299f20b 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile @@ -6,14 +6,20 @@ # http://www.dspace.org/license/ # -# This will be deployed as dspace/dspace-postgres-pgcrpyto:loadsql -FROM postgres:11 +# To build for example use: +# docker build --build-arg POSTGRES_VERSION=11 --build-arg POSTGRES_PASSWORD=mypass -f ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile . +# This will be published as dspace/dspace-postgres-pgcrypto:$DSPACE_VERSION-loadsql + +ARG POSTGRES_VERSION=11 +ARG POSTGRES_PASSWORD=dspace + +FROM postgres:${POSTGRES_VERSION}-alpine ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace -ENV POSTGRES_PASSWORD dspace -# Load a SQL dump. Set LOADSQL to a URL for the sql dump file. -RUN apt-get update && apt-get install -y curl +# Install curl which is necessary to load SQL file +RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/* +# Load a SQL dump. Set LOADSQL to a URL for the sql dump file. COPY install-pgcrypto.sh /docker-entrypoint-initdb.d/ diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile index 84b7569a2b2c..0f0f4f0efba7 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile @@ -6,13 +6,17 @@ # http://www.dspace.org/license/ # -# This will be deployed as dspace/dspace-postgres-pgcrpyto:latest -FROM postgres:11 +# To build for example use: +# docker build --build-arg POSTGRES_VERSION=11 --build-arg POSTGRES_PASSWORD=mypass -f ./dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile . 
+# This will be published as dspace/dspace-postgres-pgcrypto:$DSPACE_VERSION + +ARG POSTGRES_VERSION=11 +ARG POSTGRES_PASSWORD=dspace + +FROM postgres:${POSTGRES_VERSION}-alpine ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace -ENV POSTGRES_PASSWORD dspace - -RUN apt-get update +# Copy over script which will initialize database and install pgcrypto extension COPY install-pgcrypto.sh /docker-entrypoint-initdb.d/ From 5e013a8a36cc8a2b844fdfb8eacae8ff6a8e1390 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 19 Apr 2023 14:25:52 -0500 Subject: [PATCH 122/686] Update to latest version of Ant --- Dockerfile | 2 +- Dockerfile.cli | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 444a1bcf0b55..f1ff6adf5ac5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,7 +31,7 @@ ARG TARGET_DIR=dspace-installer COPY --from=build /install /dspace-src WORKDIR /dspace-src # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.12 +ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH # Need wget to install ant diff --git a/Dockerfile.cli b/Dockerfile.cli index 76e559fc83c3..6878d8f8dbfb 100644 --- a/Dockerfile.cli +++ b/Dockerfile.cli @@ -30,7 +30,7 @@ ARG TARGET_DIR=dspace-installer COPY --from=build /install /dspace-src WORKDIR /dspace-src # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.12 +ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH # Need wget to install ant From 3df54e549ff85c50063f445ca3762da09014b810 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 19 Apr 2023 15:14:40 -0500 Subject: [PATCH 123/686] Revert change to alpine linux. Scripts all assume debian. --- dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile | 2 +- dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile index 1f51d299f20b..3e38cdae27a3 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile @@ -13,7 +13,7 @@ ARG POSTGRES_VERSION=11 ARG POSTGRES_PASSWORD=dspace -FROM postgres:${POSTGRES_VERSION}-alpine +FROM postgres:${POSTGRES_VERSION} ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile index 0f0f4f0efba7..bf8344103c44 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile @@ -13,7 +13,7 @@ ARG POSTGRES_VERSION=11 ARG POSTGRES_PASSWORD=dspace -FROM postgres:${POSTGRES_VERSION}-alpine +FROM postgres:${POSTGRES_VERSION} ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace From d9f7e04368f60b38a0cea43d7a8771121feaf346 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Thu, 20 Apr 2023 11:13:28 +0200 Subject: [PATCH 124/686] #8636 Request a Copy's "helpdesk" strategy requires authentication to respond --- .../org/dspace/app/requestitem/RequestItemEmailNotifier.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 384f33decaf2..61b42fd185e2 100644 --- 
a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -175,9 +175,12 @@ static public void sendResponse(Context context, RequestItem ri, String subject, } } else { Bitstream bitstream = ri.getBitstream(); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, bitstream), bitstream.getName(), bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } email.send(); } else { From 98b843668e07ae2112b0ce7dbbd741af7afd3189 Mon Sep 17 00:00:00 2001 From: Sean Kalynuk Date: Thu, 20 Apr 2023 14:08:30 -0500 Subject: [PATCH 125/686] Fix #8490 preferred label lookup When the authority key still needs to be generated, skip the lookup for the preferred label since it does not exist in Solr yet. --- .../dspace/discovery/indexobject/ItemIndexFactoryImpl.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index 61827747b72d..fc024cc524b0 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -31,6 +31,7 @@ import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrInputDocument; +import org.dspace.authority.service.AuthorityValueService; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; @@ -93,6 +94,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl Date: Fri, 21 Apr 2023 07:55:24 +0200 Subject: [PATCH 126/686] Fix #8792, change column type of Process.parameters to TEXT --- dspace-api/src/main/java/org/dspace/scripts/Process.java | 4 ++++ .../V7.6_2023.04.19__process_parameters_to_text_type.sql | 9 +++++++++ 2 files changed, 13 insertions(+) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql diff --git a/dspace-api/src/main/java/org/dspace/scripts/Process.java b/dspace-api/src/main/java/org/dspace/scripts/Process.java index ce41f46bdf7b..190d214a3c8d 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/Process.java +++ b/dspace-api/src/main/java/org/dspace/scripts/Process.java @@ -21,6 +21,7 @@ import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; +import javax.persistence.Lob; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; @@ -35,6 +36,7 @@ import org.dspace.core.ReloadableEntity; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.hibernate.annotations.Type; /** * This class is the DB Entity representation of the Process object to be stored in the Database @@ -68,6 +70,8 @@ public class Process implements ReloadableEntity { @Enumerated(EnumType.STRING) private ProcessStatus processStatus; + @Lob + @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") @Column(name = "parameters") private String parameters; diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..f7e0e51d0bf7 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process ALTER COLUMN parameters TYPE TEXT; From d7c4d35d99582d0589a53d61d33216a68eb4272d Mon Sep 17 00:00:00 2001 From: "max.nuding" Date: Fri, 21 Apr 2023 11:52:24 +0200 Subject: [PATCH 127/686] Add H2 and Oracle DB migrations --- .../V7.6_2023.04.19__process_parameters_to_text_type.sql | 9 +++++++++ .../V7.6_2023.04.19__process_parameters_to_text_type.sql | 9 +++++++++ 2 files changed, 18 insertions(+) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..1028ba370c47 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process ALTER COLUMN parameters SET DATA TYPE CLOB; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..6b2dd705ea68 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process MODIFY (parameters CLOB); From f1f4fe4cdf02444f59fa46674e923addb604a07c Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 21 Apr 2023 16:54:54 -0500 Subject: [PATCH 128/686] Update to Postgres v13 (as v11 images are not being maintained). 
Add db.restore.yml upgrade script & enhance READMEs --- docker-compose.yml | 1 + dspace/src/main/docker-compose/README.md | 161 +++++++++++++++--- dspace/src/main/docker-compose/db.restore.yml | 26 +++ dspace/src/main/docker/README.md | 76 ++++++--- .../dspace-postgres-pgcrypto-curl/Dockerfile | 5 +- .../install-pgcrypto.sh | 22 ++- .../dspace-postgres-pgcrypto/Dockerfile | 5 +- 7 files changed, 236 insertions(+), 60 deletions(-) create mode 100644 dspace/src/main/docker-compose/db.restore.yml diff --git a/docker-compose.yml b/docker-compose.yml index dd26d6cf150b..36ba6af2c981 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -72,6 +72,7 @@ services: context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ environment: PGDATA: /pgdata + POSTGRES_PASSWORD: dspace networks: dspacenet: ports: diff --git a/dspace/src/main/docker-compose/README.md b/dspace/src/main/docker-compose/README.md index a83a466bdbba..35a6e6055433 100644 --- a/dspace/src/main/docker-compose/README.md +++ b/dspace/src/main/docker-compose/README.md @@ -1,4 +1,4 @@ -# Docker Compose Resources +# Docker Compose files for DSpace Backend *** :warning: **THESE IMAGES ARE NOT PRODUCTION READY** The below Docker Compose images/resources were built for development/testing only. Therefore, they may not be fully secured or up-to-date, and should not be used in production. @@ -6,27 +6,51 @@ If you wish to run DSpace on Docker in production, we recommend building your own Docker images. You are welcome to borrow ideas/concepts from the below images in doing so. But, the below images should not be used "as is" in any production scenario. *** -## root directory Resources + +## Overview +The scripts in this directory can be used to start the DSpace REST API (backend) in Docker. +Optionally, the DSpace User Interface (frontend) may also be started in Docker. + +For additional options/settings in starting the User Interface (frontend) in Docker, see the Docker Compose +documentation for the frontend: https://github.com/DSpace/dspace-angular/blob/main/docker/README.md + +## Primary Docker Compose Scripts (in root directory) +The root directory of this project contains the primary Dockerfiles & Docker Compose scripts +which are used to start the backend. + - docker-compose.yml - - Docker compose file to orchestrate DSpace 7 REST components -- docker-compose-cli - - Docker compose file to run DSpace CLI tasks within a running DSpace instance in Docker + - Docker compose file to orchestrate DSpace REST API (backend) components. + - Uses the `Dockerfile` in the same directory. +- docker-compose-cli.yml + - Docker compose file to run DSpace CLI (Command Line Interface) tasks within a running DSpace instance in Docker. See instructions below. + - Uses the `Dockerfile.cli` in the same directory. -## dspace/src/main/docker-compose resources +Documentation for all Dockerfiles used by these compose scripts can be found in the ["docker" folder README](../docker/README.md) + +## Additional Docker Compose tools (in ./dspace/src/main/docker-compose) - cli.assetstore.yml - Docker compose file that will download and install a default assetstore. + - The default assetstore is the configurable entities test dataset. Useful for [testing/demos of Entities](#Ingest Option 2 Ingest Entities Test Data). - cli.ingest.yml - - Docker compose file that will run an AIP ingest into DSpace 7. + - Docker compose file that will run an AIP ingest into DSpace 7. Useful for testing/demos with basic Items. 
- db.entities.yml - - Docker compose file that pre-populate a database instance using a SQL dump. The default dataset is the configurable entities test dataset. -- local.cfg - - Sets the environment used across containers run with docker-compose + - Docker compose file that pre-populates a database instance using a downloaded SQL dump. + - The default dataset is the configurable entities test dataset. Useful for [testing/demos of Entities](#Ingest Option 2 Ingest Entities Test Data). +- db.restore.yml + - Docker compose file that pre-populates a database instance using a *local* SQL dump (hardcoded to `./pgdump.sql`) + - Useful for restoring data from a local backup, or [Upgrading PostgreSQL in Docker](#Upgrading PostgreSQL in Docker) - docker-compose-angular.yml - - Docker compose file that will start a published DSpace angular container that interacts with the branch. + - Docker compose file that will start a published DSpace User Interface container that interacts with the branch. - docker-compose-shibboleth.yml - Docker compose file that will start a *test/demo* Shibboleth SP container (in Apache) that proxies requests to the DSpace container - ONLY useful for testing/development. NOT production ready. +- docker-compose-iiif.yml + - Docker compose file that will start a *test/demo* Cantaloupe image server container required for enabling IIIF support. + - ONLY useful for testing/development. NOT production ready. + +Documentation for all Dockerfiles used by these compose scripts can be found in the ["docker" folder README](../docker/README.md) + ## To refresh / pull DSpace images from Dockerhub ``` docker-compose -f docker-compose.yml -f docker-compose-cli.yml pull ``` @@ -55,6 +79,12 @@ docker-compose -p d7 up -d docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/docker-compose-angular.yml up -d ``` +## Run DSpace REST and DSpace Angular from local branches + +*Allows you to run the backend from the "DSpace/DSpace" codebase while also running the frontend from the "DSpace/dspace-angular" codebase.* + +See documentation in [DSpace User Interface Docker instructions](https://github.com/DSpace/dspace-angular/blob/main/docker/README.md#run-dspace-rest-and-dspace-angular-from-local-branches). + ## Run DSpace 7 REST with a IIIF Image Server from your branch *Only useful for testing IIIF support in a development environment* @@ -67,7 +97,6 @@ docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/doc ``` ## Run DSpace 7 REST and Shibboleth SP (in Apache) from your branch - *Only useful for testing Shibboleth in a development environment* This Shibboleth container uses https://samltest.id/ as an IdP (see `../docker/dspace-shibboleth/`). @@ -143,21 +172,11 @@ The remainder of these instructions assume you are using ngrok (though other pro DSPACE_HOSTNAME=[subdomain].ngrok.io docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/docker-compose-angular.yml -f dspace/src/main/docker-compose/docker-compose-shibboleth.yml up -d ``` -## Run DSpace 7 REST and Angular from local branches +## Sample Test Data -_The system will be started in 2 steps. 
Each step shares the same docker network._ +### Ingesting test content from AIP files -From DSpace/DSpace -``` -docker-compose -p d7 up -d -``` - -From DSpace/DSpace-angular (build as needed) -``` -docker-compose -p d7 -f docker/docker-compose.yml up -d -``` - -## Ingest Option 1: Ingesting test content from AIP files into a running DSpace 7 instance +*Allows you to ingest a set of AIPs into your DSpace instance for testing/demo purposes.* These AIPs represent basic Communities, Collections and Items. Prerequisites - Start DSpace 7 using one of the options listed above @@ -173,8 +192,14 @@ Download a Zip file of AIP content and ingest test data docker-compose -p d7 -f docker-compose-cli.yml -f dspace/src/main/docker-compose/cli.ingest.yml run --rm dspace-cli ``` -## Ingest Option 2: Ingest Entities Test Data -_Remove your d7 volumes if you already ingested content into your docker volumes_ +### Ingest Entities Test Data + +*Allows you to load Configurable Entities test data for testing/demo purposes.* + +Prerequisites +- Start DSpace 7 using one of the options listed above +- Build the DSpace CLI image if needed. See the instructions above. +- _Remove your d7 volumes if you already ingested content into your docker volumes_ Start DSpace REST with a postgres database dump downloaded from the internet. ``` @@ -212,3 +237,85 @@ Similarly, you can see the value of any DSpace configuration (in local.cfg or ds # Output the value of `dspace.ui.url` from running Docker instance docker-compose -p d7 -f docker-compose-cli.yml run --rm dspace-cli dsprop -p dspace.ui.url ``` + +NOTE: It is also possible to run CLI scripts directly on the "dspace" container (where the backend runs) +This can be useful if you want to pass environment variables which override DSpace configs. +``` +# Run the "./dspace database clean" command from the "dspace" container +# Before doing so, it sets "db.cleanDisabled=false". +# WARNING: This will delete all your data. It's just an example of how to do so. +docker-compose -p d7 exec -e "db__P__cleanDisabled=false" dspace /dspace/bin/dspace database clean +``` + +## Upgrading PostgreSQL in Docker + +Occasionally, we update our `dspace-postgres-*` images to use a new version of PostgreSQL. +Simply using the new image will likely throw errors as the pgdata (postgres data) directory is incompatible +with the new version of PostgreSQL. These errors look like: +``` +FATAL: database files are incompatible with server +DETAIL: The data directory was initialized by PostgreSQL version 11, which is not compatible with this version 13.10 +``` + +Here's how to fix those issues by migrating your old Postgres data to the new version of Postgres + +1. First, you must start up the older PostgreSQL image (to dump your existing data to a `*.sql` file) + ``` + # This command assumes you are using the process described above to start all your containers + docker-compose -p d7 up -d + ``` + * If you've already accidentally updated to the new PostgreSQL image, you have a few options: + * Pull down an older version of the image from Dockerhub (using a tag) + * Or, temporarily rebuild your local image with the old version of Postgres. For example: + ``` + # This command will rebuild using PostgreSQL v11 & tag it locally as "dspace-7_x" + docker build --build-arg POSTGRES_VERSION=11 -t dspace/dspace-postgres-pgcrypto:dspace-7_x ./dspace/src/main/docker/dspace-postgres-pgcrypto/ + # Then restart container with that image + docker-compose -p d7 up -d + ``` +2. 
Dump your entire "dspace" database out of the old "dspacedb" container to a local file named `pgdump.sql` + ``` + # NOTE: WE HIGHLY RECOMMEND LOGGING INTO THE CONTAINER and doing the pg_dump within the container. + # If you attempt to run pg_dump from your local machine via docker "exec" (or similar), sometimes + # UTF-8 characters can be corrupted in the export file. This may result in data loss. + + # First login to the "dspacedb" container + docker exec -it dspacedb /bin/bash + + # Dump the "dspace" database to a file named "/tmp/pgdump.sql" within the container + pg_dump -U dspace dspace > /tmp/pgdump.sql + + # Exit the container + exit + + # Download (copy) that /tmp/pgdump.sql backup file from container to your local machine + docker cp dspacedb:/tmp/pgdump.sql . + ``` +3. Now, stop all existing containers. This shuts down the old version of PostgreSQL + ``` + # This command assumes you are using the process described above to start/stop all your containers + docker-compose -p d7 down + ``` +4. Delete the `pgdata` volume. WARNING: This deletes all your old PostgreSQL data. Make sure you have that `pgdump.sql` file FIRST! + ``` + # Assumes you are using `-p d7` which prefixes all volumes with `d7_` + docker volume rm d7_pgdata + ``` +5. Now, pull down the latest PostgreSQL image with the NEW version of PostgreSQL. + ``` + docker-compose -f docker-compose.yml -f docker-compose-cli.yml pull + ``` +6. Start everything up using our `db.restore.yml` script. This script will recreate the database +using the local `./pgdump.sql` file. IMPORTANT: If you renamed that "pgdump.sql" file or stored it elsewhere, +then you MUST change the name/directory in the `db.restore.yml` script. + ``` + # Restore database from "./pgdump.sql" (this path is hardcoded in db.restore.yml) + docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/db.restore.yml up -d + ``` +7. Finally, reindex all database contents into Solr (just to be sure Solr indexes are current). + ``` + # Run "./dspace index-discovery -b" using our CLI image + docker-compose -p d7 -f docker-compose-cli.yml run --rm dspace-cli index-discovery -b + ``` +At this point in time, all your old database data should be migrated to the new Postgres +and running at http://localhost:8080/server/ \ No newline at end of file diff --git a/dspace/src/main/docker-compose/db.restore.yml b/dspace/src/main/docker-compose/db.restore.yml new file mode 100644 index 000000000000..fc2f30b9d8e0 --- /dev/null +++ b/dspace/src/main/docker-compose/db.restore.yml @@ -0,0 +1,26 @@ +# +# The contents of this file are subject to the license and copyright +# detailed in the LICENSE and NOTICE files at the root of the source +# tree and available online at +# +# http://www.dspace.org/license/ +# + +version: "3.7" + +# +# Overrides the default "dspacedb" container behavior to load a local SQL file into PostgreSQL. +# +# This can be used to restore a "dspacedb" container from a pg_dump, or during upgrade to a new version of PostgreSQL. +services: + dspacedb: + image: dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql + environment: + # Location where the dump SQL file will be available on the running container + - LOCALSQL=/tmp/pgdump.sql + volumes: + # Volume which shares a local SQL file at "./pgdump.sql" to the running container + # IF YOUR LOCAL FILE HAS A DIFFERENT NAME (or is in a different location), then change the "./pgdump.sql" + # portion of this line. 
+ - ./pgdump.sql:/tmp/pgdump.sql + diff --git a/dspace/src/main/docker/README.md b/dspace/src/main/docker/README.md index f2a944f60851..ac1b4cb9236b 100644 --- a/dspace/src/main/docker/README.md +++ b/dspace/src/main/docker/README.md @@ -1,4 +1,4 @@ -# Docker images supporting DSpace +# Docker images supporting DSpace Backend *** :warning: **THESE IMAGES ARE NOT PRODUCTION READY** The below Docker Compose images/resources were built for development/testing only. Therefore, they may not be fully secured or up-to-date, and should not be used in production. @@ -6,9 +6,15 @@ If you wish to run DSpace on Docker in production, we recommend building your own Docker images. You are welcome to borrow ideas/concepts from the below images in doing so. But, the below images should not be used "as is" in any production scenario. *** -## Dockerfile.dependencies +## Overview +The Dockerfiles in this directory (and subdirectories) are used by our [Docker Compose scripts](../docker-compose/README.md). + +## Dockerfile.dependencies (in root folder) This Dockerfile is used to pre-cache Maven dependency downloads that will be used in subsequent DSpace docker builds. +Caching these Maven dependencies provides a speed increase to all later builds by ensuring the dependencies +are only downloaded once. + ``` docker build -t dspace/dspace-dependencies:dspace-7_x -f Dockerfile.dependencies . ``` @@ -22,12 +28,13 @@ Admins to our DockerHub repo can manually publish with the following command. docker push dspace/dspace-dependencies:dspace-7_x ``` -## Dockerfile.test +## Dockerfile.test (in root folder) -This Dockerfile builds a DSpace 7 Tomcat image (for testing/development). -This image deploys two DSpace webapps: +This Dockerfile builds a DSpace 7 backend image (for testing/development). +This image deploys two DSpace webapps to Tomcat running in Docker: 1. The DSpace 7 REST API (at `http://localhost:8080/server`) -2. The legacy (v6) REST API (at `http://localhost:8080//rest`), deployed without requiring HTTPS access. +2. The legacy (v6) REST API (at `http://localhost:8080/rest`), deployed without requiring HTTPS access. +This image also sets up debugging in Tomcat for development. ``` docker build -t dspace/dspace:dspace-7_x-test -f Dockerfile.test . @@ -42,12 +49,12 @@ Admins to our DockerHub repo can manually publish with the following command. docker push dspace/dspace:dspace-7_x-test ``` -## Dockerfile +## Dockerfile (in root folder) -This Dockerfile builds a DSpace 7 tomcat image. -This image deploys two DSpace webapps: +This Dockerfile builds a DSpace 7 backend image. +This image deploys one DSpace webapp to Tomcat running in Docker: 1. The DSpace 7 REST API (at `http://localhost:8080/server`) -2. The legacy (v6) REST API (at `http://localhost:8080//rest`), deployed *requiring* HTTPS access. + ``` docker build -t dspace/dspace:dspace-7_x -f Dockerfile . ``` @@ -61,9 +68,9 @@ Admins to our DockerHub repo can publish with the following command. docker push dspace/dspace:dspace-7_x ``` -## Dockefile.cli +## Dockerfile.cli (in root folder) -This Dockerfile builds a DSpace 7 CLI image, which can be used to run commandline tools via Docker. +This Dockerfile builds a DSpace 7 CLI (command line interface) image, which can be used to run DSpace's commandline tools via Docker. ``` docker build -t dspace/dspace-cli:dspace-7_x -f Dockerfile.cli . ``` @@ -77,46 +84,60 @@ Admins to our DockerHub repo can publish with the following command. 
docker push dspace/dspace-cli:dspace-7_x ``` -## dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile +## ./dspace-postgres-pgcrypto/Dockerfile This is a PostgreSQL Docker image containing the `pgcrypto` extension required by DSpace 6+. +This image is built *automatically* after each commit is made to the `main` branch. + +How to build manually: ``` cd dspace/src/main/docker/dspace-postgres-pgcrypto -docker build -t dspace/dspace-postgres-pgcrypto . +docker build -t dspace/dspace-postgres-pgcrypto:dspace-7_x . ``` -**This image is built manually.** It should be rebuilt as needed. +It is also possible to change the version of PostgreSQL or the PostgreSQL user's password during the build: +``` +cd dspace/src/main/docker/dspace-postgres-pgcrypto +docker build -t dspace/dspace-postgres-pgcrypto:dspace-7_x --build-arg POSTGRES_VERSION=11 --build-arg POSTGRES_PASSWORD=mypass . +``` A copy of this file exists in the DSpace 6 branch. A specialized version of this file exists for DSpace 4 in DSpace-Docker-Images. -Admins to our DockerHub repo can publish with the following command. +Admins to our DockerHub repo can (manually) publish with the following command. ``` -docker push dspace/dspace-postgres-pgcrypto +docker push dspace/dspace-postgres-pgcrypto:dspace-7_x ``` -## dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile +## ./dspace-postgres-pgcrypto-curl/Dockerfile This is a PostgreSQL Docker image containing the `pgcrypto` extension required by DSpace 6+. This image also contains `curl`. The image is pre-configured to load a Postgres database dump on initialization. + +This image is built *automatically* after each commit is made to the `main` branch. + +How to build manually: ``` cd dspace/src/main/docker/dspace-postgres-pgcrypto-curl -docker build -t dspace/dspace-postgres-pgcrypto:loadsql . +docker build -t dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql . ``` -**This image is built manually.** It should be rebuilt as needed. +Similar to `dspace-postgres-pgcrypto` above, you can also modify the version of PostgreSQL or the PostgreSQL user's password. +See examples above. A copy of this file exists in the DSpace 6 branch. -Admins to our DockerHub repo can publish with the following command. +Admins to our DockerHub repo can (manually) publish with the following command. ``` -docker push dspace/dspace-postgres-pgcrypto:loadsql +docker push dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql ``` -## dspace/src/main/docker/dspace-shibboleth/Dockerfile +## ./dspace-shibboleth/Dockerfile This is a test / demo image which provides an Apache HTTPD proxy (in front of Tomcat) -with mod_shib & Shibboleth installed. It is primarily for usage for -testing DSpace's Shibboleth integration. It uses https://samltest.id/ as the Shibboleth IDP +with `mod_shib` & Shibboleth installed based on the +[DSpace Shibboleth configuration instructions](https://wiki.lyrasis.org/display/DSDOC7x/Authentication+Plugins#AuthenticationPlugins-ShibbolethAuthentication). +It is primarily for usage for testing DSpace's Shibboleth integration. +It uses https://samltest.id/ as the Shibboleth IDP **This image is built manually.** It should be rebuilt as needed. @@ -130,7 +151,7 @@ docker run -i -t -d -p 80:80 -p 443:443 dspace/dspace-shibboleth This image can also be rebuilt using the `../docker-compose/docker-compose-shibboleth.yml` script. -## dspace/src/main/docker/dspace-solr/Dockerfile +## ./dspace-solr/Dockerfile This Dockerfile builds a Solr image with DSpace Solr configsets included. 
It can be pulled / built following the [docker compose resources](../docker-compose/README.md) @@ -148,9 +169,10 @@ restart the `dspacesolr` container for the changes to be deployed. From DSpace r docker-compose -p d7 up --detach --build dspacesolr ``` -## test/ folder +## ./test/ folder These resources are bundled into the `dspace/dspace:dspace-*-test` image at build time. +See the `Dockerfile.test` section above for more information about the test image. ## Debugging Docker builds diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile index 3e38cdae27a3..b2131a740262 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile @@ -7,16 +7,17 @@ # # To build for example use: -# docker build --build-arg POSTGRES_VERSION=11 --build-arg POSTGRES_PASSWORD=mypass -f ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile . +# docker build --build-arg POSTGRES_VERSION=13 --build-arg POSTGRES_PASSWORD=mypass ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/ # This will be published as dspace/dspace-postgres-pgcrypto:$DSPACE_VERSION-loadsql -ARG POSTGRES_VERSION=11 +ARG POSTGRES_VERSION=13 ARG POSTGRES_PASSWORD=dspace FROM postgres:${POSTGRES_VERSION} ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace +ENV POSTGRES_PASSWORD ${POSTGRES_PASSWORD} # Install curl which is necessary to load SQL file RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/* diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh index 054d3dede5dc..3f8e95e1044f 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh @@ -11,15 +11,33 @@ set -e CHECKFILE=/pgdata/ingest.hasrun.flag +# If $LOADSQL environment variable set, use 'curl' to download that SQL and run it in PostgreSQL +# This can be used to initialize a database based on test data available on the web. if [ ! -f $CHECKFILE -a ! -z ${LOADSQL} ] then - curl ${LOADSQL} -L -s --output /tmp/dspace.sql - psql -U $POSTGRES_USER < /tmp/dspace.sql + # Download SQL file to /tmp/dspace-db-init.sql + curl ${LOADSQL} -L -s --output /tmp/dspace-db-init.sql + # Load into PostgreSQL + psql -U $POSTGRES_USER < /tmp/dspace-db-init.sql + # Remove downloaded file + rm /tmp/dspace-db-init.sql touch $CHECKFILE exit fi +# If $LOCALSQL environment variable set, then simply run it in PostgreSQL +# This can be used to restore data from a pg_dump or similar. +if [ ! -f $CHECKFILE -a ! 
-z ${LOCALSQL} ] +then + # Load into PostgreSQL + psql -U $POSTGRES_USER < ${LOCALSQL} + + touch $CHECKFILE + exit +fi + +# Then, setup pgcrypto on this database psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL -- Create a new schema in this database named "extensions" (or whatever you want to name it) CREATE SCHEMA extensions; diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile index bf8344103c44..7dde1a6bfd1c 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile @@ -7,16 +7,17 @@ # # To build for example use: -# docker build --build-arg POSTGRES_VERSION=11 --build-arg POSTGRES_PASSWORD=mypass -f ./dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile . +# docker build --build-arg POSTGRES_VERSION=13 --build-arg POSTGRES_PASSWORD=mypass ./dspace/src/main/docker/dspace-postgres-pgcrypto/ # This will be published as dspace/dspace-postgres-pgcrypto:$DSPACE_VERSION -ARG POSTGRES_VERSION=11 +ARG POSTGRES_VERSION=13 ARG POSTGRES_PASSWORD=dspace FROM postgres:${POSTGRES_VERSION} ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace +ENV POSTGRES_PASSWORD ${POSTGRES_PASSWORD} # Copy over script which will initialize database and install pgcrypto extension COPY install-pgcrypto.sh /docker-entrypoint-initdb.d/ From 61845da1aab8ee7a80aab524b54144b05d0f90f0 Mon Sep 17 00:00:00 2001 From: Hardy Pottinger Date: Tue, 25 Apr 2023 11:17:24 -0500 Subject: [PATCH 129/686] Install unzip in Dockerfile.cli so cli.ingest.yml can work correctly (requires unzip) --- Dockerfile.cli | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile.cli b/Dockerfile.cli index 6878d8f8dbfb..62e83b79ef02 100644 --- a/Dockerfile.cli +++ b/Dockerfile.cli @@ -33,9 +33,9 @@ WORKDIR /dspace-src ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH -# Need wget to install ant +# Need wget to install ant, and unzip for managing AIPs RUN apt-get update \ - && apt-get install -y --no-install-recommends wget \ + && apt-get install -y --no-install-recommends wget unzip \ && apt-get purge -y --auto-remove \ && rm -rf /var/lib/apt/lists/* # Download and install 'ant' From a15a41401b7725996a5e251319ed09444e949eb0 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 6 Mar 2023 09:34:29 -0600 Subject: [PATCH 130/686] Update Spring & Spring Boot versions --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 3b6f7edb6549..062544c91c41 100644 --- a/pom.xml +++ b/pom.xml @@ -19,9 +19,9 @@ 11 - 5.3.20 - 2.6.8 - 5.6.5 + 5.3.25 + 2.7.9 + 5.7.7 5.6.5.Final 6.0.23.Final 42.4.3 From 42f548b17e282caeef614e6e87012ad20c4775f9 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 10 Mar 2023 14:40:07 -0600 Subject: [PATCH 131/686] Remove check for double-encoded param. It is no longer double-encoded after upgrading Spring Boot. 
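As a rough illustration of the encoding difference this test change reflects (a standalone sketch, not taken from the DSpace code; the class name and URL below are made up for the example): java.net.URI escapes a raw space once as %20, while passing an already-encoded value through the same constructor escapes the '%' itself and yields the double-encoded %2520.

```java
import java.net.URI;
import java.net.URISyntaxException;

public class PercentEncodingSketch {
    public static void main(String[] args) throws URISyntaxException {
        // Encoding the raw value once: the space becomes %20
        URI encodedOnce = new URI("http", "localhost",
                "/server/api/discover/facets/supervisedBy", "prefix=group B", null);
        System.out.println(encodedOnce);   // ...supervisedBy?prefix=group%20B

        // Re-encoding an already-encoded value: the '%' is escaped again,
        // producing the double-encoded form
        URI encodedTwice = new URI("http", "localhost",
                "/server/api/discover/facets/supervisedBy", "prefix=group%20B", null);
        System.out.println(encodedTwice);  // ...supervisedBy?prefix=group%2520B
    }
}
```

The test expectation in the diff below changes accordingly, from the double-encoded %2520 form to the singly encoded %20 form.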
--- .../java/org/dspace/app/rest/DiscoveryRestControllerIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java index 8b01e7b37790..a115c8aa2f15 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java @@ -6473,7 +6473,7 @@ public void discoverFacetsSupervisedByWithPrefixTest() throws Exception { .andExpect(jsonPath("$.facetType", equalTo("authority"))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", - containsString("api/discover/facets/supervisedBy?prefix=group%2520B&configuration=supervision"))) + containsString("api/discover/facets/supervisedBy?prefix=group%20B&configuration=supervision"))) //This is how the page object must look like because it's the default with size 20 .andExpect(jsonPath("$.page", is(PageMatcher.pageEntry(0, 20)))) From 66d7c1bde6295798cea551f1f463fccb9011ea6d Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 24 Mar 2023 09:16:52 -0500 Subject: [PATCH 132/686] Update to latest spring-boot and spring --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 062544c91c41..1b4d34ccbf98 100644 --- a/pom.xml +++ b/pom.xml @@ -19,8 +19,8 @@ 11 - 5.3.25 - 2.7.9 + 5.3.26 + 2.7.10 5.7.7 5.6.5.Final 6.0.23.Final From 8cd82e5d2e13b4fdca1fbc64634bbf825ced0708 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 25 Apr 2023 17:13:26 -0500 Subject: [PATCH 133/686] Update to Spring Boot v2.7.11 --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 1b4d34ccbf98..aec4b57fc1ed 100644 --- a/pom.xml +++ b/pom.xml @@ -19,9 +19,9 @@ 11 - 5.3.26 - 2.7.10 - 5.7.7 + 5.3.27 + 2.7.11 + 5.7.8 5.6.5.Final 6.0.23.Final 42.4.3 From 535edef1b3498cf32b3214fe91af55a2a7ec5ae8 Mon Sep 17 00:00:00 2001 From: Abel Date: Fri, 28 Apr 2023 03:53:43 +0200 Subject: [PATCH 134/686] Add ImageMagickVideoThumbnailFilter --- .../ImageMagickVideoThumbnailFilter.java | 74 +++++++++++++++++++ dspace/config/dspace.cfg | 5 ++ 2 files changed, 79 insertions(+) create mode 100644 dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java new file mode 100644 index 000000000000..2f5ff9da0f65 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.InputStream; +import java.io.IOException; +import java.nio.file.Files; + +import org.dspace.content.Item; +import org.im4java.core.ConvertCmd; +import org.im4java.core.IM4JavaException; +import org.im4java.core.IMOperation; + + +/** + * Filter video bitstreams, scaling the image to be within the bounds of + * thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be + * 
no bigger than. Creates only JPEGs. + */ +public class ImageMagickVideoThumbnailFilter extends ImageMagickThumbnailFilter { + private static final int DEFAULT_WIDTH = 340; + private static final int DEFAULT_HEIGHT = 280; + + /** + * @param currentItem item + * @param source source input stream + * @param verbose verbose mode + * @return InputStream the resulting input stream + * @throws Exception if error + */ + @Override + public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) + throws Exception { + File f = inputStreamToTempFile(source, "imthumb", ".tmp"); + File f2 = null; + try { + f2 = getThumbnailFile(f, verbose); + byte[] bytes = Files.readAllBytes(f2.toPath()); + return new ByteArrayInputStream(bytes); + } finally { + //noinspection ResultOfMethodCallIgnored + f.delete(); + if (f2 != null) { + //noinspection ResultOfMethodCallIgnored + f2.delete(); + } + } + } + + public File getThumbnailFile(File f, boolean verbose) + throws IOException, InterruptedException, IM4JavaException { + File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + f2.deleteOnExit(); + ConvertCmd cmd = new ConvertCmd(); + IMOperation op = new IMOperation(); + op.autoOrient(); + op.addImage("VIDEO:" + f.getAbsolutePath() + "[100]"); + op.thumbnail(configurationService.getIntProperty("thumbnail.maxwidth", DEFAULT_WIDTH), + configurationService.getIntProperty("thumbnail.maxheight", DEFAULT_HEIGHT)); + op.addImage(f2.getAbsolutePath()); + if (verbose) { + System.out.println("IM Thumbnail Param: " + op); + } + cmd.run(op); + return f2; + } +} diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 2a35e894593a..138a690af45a 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -465,6 +465,9 @@ filter.plugins = PDFBox JPEG Thumbnail # remove "JPEG Thumbnail" from the plugin list # uncomment and insert the following line into the plugin list # ImageMagick Image Thumbnail, ImageMagick PDF Thumbnail, \ +# [To enable ImageMagick Video Thumbnails (requires both ImageMagick and ffmpeg installed)]: +# uncomment and insert the following line into the plugin list +# ImageMagick Video Thumbnail, \ #Assign 'human-understandable' names to each filter plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.TikaTextExtractionFilter = Text Extractor @@ -473,6 +476,7 @@ plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilte plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.PDFBoxThumbnail = PDFBox JPEG Thumbnail plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.ImageMagickImageThumbnailFilter = ImageMagick Image Thumbnail plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.ImageMagickPdfThumbnailFilter = ImageMagick PDF Thumbnail +plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.ImageMagickVideoThumbnailFilter = ImageMagick Video Thumbnail #Configure each filter's input format(s) # NOTE: The TikaTextExtractionFilter can support any file formats that are supported by Apache Tika. 
So, you can easily @@ -496,6 +500,7 @@ filter.org.dspace.app.mediafilter.JPEGFilter.inputFormats = BMP, GIF, JPEG, imag filter.org.dspace.app.mediafilter.BrandedPreviewJPEGFilter.inputFormats = BMP, GIF, JPEG, image/png filter.org.dspace.app.mediafilter.ImageMagickImageThumbnailFilter.inputFormats = BMP, GIF, image/png, JPG, TIFF, JPEG, JPEG 2000 filter.org.dspace.app.mediafilter.ImageMagickPdfThumbnailFilter.inputFormats = Adobe PDF +filter.org.dspace.app.mediafilter.ImageMagickVideoThumbnailFilter.inputFormats = Video MP4 filter.org.dspace.app.mediafilter.PDFBoxThumbnail.inputFormats = Adobe PDF #Publicly accessible thumbnails of restricted content. From 43661de74ea7e1f4dde4ea2dae26ed5a9e3f34f6 Mon Sep 17 00:00:00 2001 From: Abel Date: Fri, 28 Apr 2023 03:59:00 +0200 Subject: [PATCH 135/686] Add Override annotation --- .../dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java | 1 + 1 file changed, 1 insertion(+) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java index 2f5ff9da0f65..f6f657eaa0a3 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java @@ -54,6 +54,7 @@ public InputStream getDestinationStream(Item currentItem, InputStream source, bo } } + @Override public File getThumbnailFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); From d256065fc2795f43c6197b09f5ee26e04c7827a5 Mon Sep 17 00:00:00 2001 From: Abel Date: Fri, 28 Apr 2023 04:09:26 +0200 Subject: [PATCH 136/686] Set FRAME_NUMBER using a constant --- .../app/mediafilter/ImageMagickVideoThumbnailFilter.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java index f6f657eaa0a3..c5ee4639d761 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java @@ -27,6 +27,7 @@ public class ImageMagickVideoThumbnailFilter extends ImageMagickThumbnailFilter { private static final int DEFAULT_WIDTH = 340; private static final int DEFAULT_HEIGHT = 280; + private static final int FRAME_NUMBER = 100; /** * @param currentItem item @@ -62,7 +63,7 @@ public File getThumbnailFile(File f, boolean verbose) ConvertCmd cmd = new ConvertCmd(); IMOperation op = new IMOperation(); op.autoOrient(); - op.addImage("VIDEO:" + f.getAbsolutePath() + "[100]"); + op.addImage("VIDEO:" + f.getAbsolutePath() + "[" + FRAME_NUMBER + "]"); op.thumbnail(configurationService.getIntProperty("thumbnail.maxwidth", DEFAULT_WIDTH), configurationService.getIntProperty("thumbnail.maxheight", DEFAULT_HEIGHT)); op.addImage(f2.getAbsolutePath()); From 413c6f443f597a8addfd4db85974a2700c0fd6a5 Mon Sep 17 00:00:00 2001 From: Abel Date: Fri, 28 Apr 2023 04:28:38 +0200 Subject: [PATCH 137/686] Use same defaults than ImageMagickThumbnailFilter --- .../app/mediafilter/ImageMagickVideoThumbnailFilter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java 
b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java index c5ee4639d761..6f8706d774df 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java @@ -25,8 +25,8 @@ * no bigger than. Creates only JPEGs. */ public class ImageMagickVideoThumbnailFilter extends ImageMagickThumbnailFilter { - private static final int DEFAULT_WIDTH = 340; - private static final int DEFAULT_HEIGHT = 280; + private static final int DEFAULT_WIDTH = 180; + private static final int DEFAULT_HEIGHT = 120; private static final int FRAME_NUMBER = 100; /** From f7414f1d58c5a80dc1ac88017b1facc6451235f5 Mon Sep 17 00:00:00 2001 From: Abel Date: Fri, 28 Apr 2023 04:40:23 +0200 Subject: [PATCH 138/686] Add a comment about ImageMagick policies --- dspace/config/dspace.cfg | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 138a690af45a..62d429a7e205 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -468,6 +468,9 @@ filter.plugins = PDFBox JPEG Thumbnail # [To enable ImageMagick Video Thumbnails (requires both ImageMagick and ffmpeg installed)]: # uncomment and insert the following line into the plugin list # ImageMagick Video Thumbnail, \ +# NOTE: pay attention to the ImageMagick policies and resource limits in its policy.xml +# configuration file. The limits may have to be increased if a "cache resources +# exhausted" error is thrown. #Assign 'human-understandable' names to each filter plugin.named.org.dspace.app.mediafilter.FormatFilter = org.dspace.app.mediafilter.TikaTextExtractionFilter = Text Extractor From 1138667f3a3fc4c4ee76667b79e882bfcd086daa Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Fri, 28 Apr 2023 09:50:12 +0200 Subject: [PATCH 139/686] [CST-6402] fix saf import via remote url: the url param is a string --- .../org/dspace/app/itemimport/ItemImportScriptConfiguration.java | 1 - 1 file changed, 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java index cfe97ad89bd6..2d304d8a7dc8 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -67,7 +67,6 @@ public Options getOptions() { .hasArg().build()); options.addOption(Option.builder("u").longOpt("url") .desc("url of zip file") - .type(InputStream.class) .hasArg().build()); options.addOption(Option.builder("c").longOpt("collection") .desc("destination collection(s) Handle or database ID") .hasArg().build()); From dd1a5188b60514e7890b7dde8457f9f1e4df1338 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Fri, 28 Apr 2023 09:51:50 +0200 Subject: [PATCH 140/686] [CST-6402] manage saf import via remote url in CLI mode --- .../org/dspace/app/itemimport/ItemImport.java | 6 ++-- .../dspace/app/itemimport/ItemImportCLI.java | 33 ++++++++++++++++--- .../ItemImportCLIScriptConfiguration.java | 3 ++ 3 files changed, 36 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index 2b1089e3e01f..ac9db7605103 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ 
b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -79,7 +79,7 @@ public class ItemImport extends DSpaceRunnable { protected String zipfilename = null; protected boolean help = false; protected File workDir = null; - private File workFile = null; + protected File workFile = null; protected static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); @@ -239,6 +239,9 @@ public void internalRun() throws Exception { if (zip) { FileUtils.deleteDirectory(new File(sourcedir)); FileUtils.deleteDirectory(workDir); + if (remoteUrl && workFile != null && workFile.exists()) { + workFile.delete(); + } } Date endTime = new Date(); @@ -308,7 +311,6 @@ protected void process(Context context, ItemImportService itemImportService, handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE); } finally { mapFile.delete(); - workFile.delete(); } } diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java index 35de7b443a97..08ecdd097c16 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -8,10 +8,14 @@ package org.dspace.app.itemimport; import java.io.File; +import java.io.InputStream; +import java.net.URL; import java.sql.SQLException; import java.util.List; +import java.util.Optional; import java.util.UUID; +import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.content.Collection; @@ -96,10 +100,25 @@ protected void process(Context context, ItemImportService itemImportService, protected void readZip(Context context, ItemImportService itemImportService) throws Exception { // If this is a zip archive, unzip it first if (zip) { - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR - + File.separator + context.getCurrentUser().getID()); - sourcedir = itemImportService.unzip( - new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath()); + if (!remoteUrl) { + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip( + new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath()); + } else { + // manage zip via remote url + Optional optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (optionalFileStream.isPresent()) { + workFile = new File(itemImportService.getTempWorkDir() + File.separator + + zipfilename + "-" + context.getCurrentUser().getID()); + FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); + } else { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); + } } } @@ -120,6 +139,12 @@ protected void setZip() { zip = true; zipfilename = commandLine.getOptionValue('z'); } + + if (commandLine.hasOption('u')) { // remote url + zip = true; + remoteUrl = true; + zipfilename = commandLine.getOptionValue('u'); + } } @Override diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java 
b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java index d265cbf4a1d6..89abd7155b39 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java @@ -37,6 +37,9 @@ public Options getOptions() { options.addOption(Option.builder("z").longOpt("zip") .desc("name of zip file") .hasArg().required(false).build()); + options.addOption(Option.builder("u").longOpt("url") + .desc("url of zip file") + .hasArg().build()); options.addOption(Option.builder("c").longOpt("collection") .desc("destination collection(s) Handle or database ID") .hasArg().required(false).build()); From 77f6daf0a32e0266fe92aa7c59ec91927ddcbbe9 Mon Sep 17 00:00:00 2001 From: Abel Date: Fri, 28 Apr 2023 13:36:20 +0200 Subject: [PATCH 141/686] Change import order --- .../dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java index 6f8706d774df..4221a514d7d5 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java @@ -9,8 +9,8 @@ import java.io.ByteArrayInputStream; import java.io.File; -import java.io.InputStream; import java.io.IOException; +import java.io.InputStream; import java.nio.file.Files; import org.dspace.content.Item; From c919df81fb15868169a0a14d5be42079fe0c3908 Mon Sep 17 00:00:00 2001 From: Michael Plate Date: Fri, 28 Apr 2023 17:09:55 +0200 Subject: [PATCH 142/686] LDAP Auth extended for many groups --- .../authenticate/LDAPAuthentication.java | 90 ++++++++++++------- 1 file changed, 59 insertions(+), 31 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java index f3c6022e02c2..afd82db863ba 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java @@ -11,9 +11,11 @@ import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Hashtable; +import java.util.Iterator; import java.util.List; import javax.naming.NamingEnumeration; import javax.naming.NamingException; @@ -64,6 +66,7 @@ * @author Reuben Pasquini * @author Samuel Ottenhoff * @author Ivan Masár + * @author Michael Plate */ public class LDAPAuthentication implements AuthenticationMethod { @@ -391,7 +394,7 @@ private static class SpeakerToLDAP { protected String ldapGivenName = null; protected String ldapSurname = null; protected String ldapPhone = null; - protected String ldapGroup = null; + protected ArrayList ldapGroup = null; /** * LDAP settings @@ -406,9 +409,9 @@ private static class SpeakerToLDAP { final String ldap_surname_field; final String ldap_phone_field; final String ldap_group_field; - final boolean useTLS; + SpeakerToLDAP(Logger thelog) { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -547,7 +550,11 @@ protected String getDNOfUser(String adminUser, String adminPassword, Context con if (attlist[4] != null) { att 
= atts.get(attlist[4]); if (att != null) { - ldapGroup = (String) att.get(); + // loop through all groups returned by LDAP + ldapGroup = new ArrayList(); + for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) { + ldapGroup.add((String) val.next()); + } } } @@ -693,48 +700,69 @@ public String getName() { /* * Add authenticated users to the group defined in dspace.cfg by * the authentication-ldap.login.groupmap.* key. + * + * @param dn + * The string containing distinguished name of the user + * + * @param group + * List of strings with LDAP dn of groups + * + * @param context + * DSpace context */ - private void assignGroups(String dn, String group, Context context) { + private void assignGroups(String dn, ArrayList group, Context context) { if (StringUtils.isNotBlank(dn)) { System.out.println("dn:" + dn); int i = 1; String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i); - boolean cmp; + + // groupmap contains the mapping of LDAP groups to DSpace groups + // outer loop with the DSpace groups while (groupMap != null) { String t[] = groupMap.split(":"); String ldapSearchString = t[0]; String dspaceGroupName = t[1]; - if (group == null) { - cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); - } else { - cmp = StringUtils.equalsIgnoreCase(group, ldapSearchString); - } + // list of strings with dn from LDAP groups + // inner loop + Iterator groupIterator = group.iterator(); + while (groupIterator.hasNext()) { - if (cmp) { - // assign user to this group - try { - Group ldapGroup = groupService.findByName(context, dspaceGroupName); - if (ldapGroup != null) { - groupService.addMember(context, ldapGroup, context.getCurrentUser()); - groupService.update(context, ldapGroup); - } else { - // The group does not exist - log.warn(LogHelper.getHeader(context, - "ldap_assignGroupsBasedOnLdapDn", - "Group defined in authentication-ldap.login.groupmap." + i - + " does not exist :: " + dspaceGroupName)); + // save the current entry from iterator for further use + String currentGroup = groupIterator.next(); + + // very much the old code from DSpace <= 7.5 + if (currentGroup == null) { + cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); + } else { + cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString); + } + + if (cmp) { + // assign user to this group + try { + Group ldapGroup = groupService.findByName(context, dspaceGroupName); + if (ldapGroup != null) { + groupService.addMember(context, ldapGroup, context.getCurrentUser()); + groupService.update(context, ldapGroup); + } else { + // The group does not exist + log.warn(LogHelper.getHeader(context, + "ldap_assignGroupsBasedOnLdapDn", + "Group defined in authentication-ldap.login.groupmap." 
+ i + + " does not exist :: " + dspaceGroupName)); + } + } catch (AuthorizeException ae) { + log.debug(LogHelper.getHeader(context, + "assignGroupsBasedOnLdapDn could not authorize addition to " + + "group", + dspaceGroupName)); + } catch (SQLException e) { + log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", + dspaceGroupName)); } - } catch (AuthorizeException ae) { - log.debug(LogHelper.getHeader(context, - "assignGroupsBasedOnLdapDn could not authorize addition to " + - "group", - dspaceGroupName)); - } catch (SQLException e) { - log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", - dspaceGroupName)); } } From fdf4a4d5712e9b1be3a9966dc080af90db002243 Mon Sep 17 00:00:00 2001 From: Chandaluri Vamsi Krishna Date: Sun, 30 Apr 2023 22:32:42 +0530 Subject: [PATCH 143/686] Updated to fetch dynamic default description Signed-off-by: Chandaluri Vamsi Krishna --- .../src/main/java/org/dspace/app/util/SyndicationFeed.java | 3 ++- dspace-api/src/main/resources/Messages.properties | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java index 5b0520a3e1d1..c1402499c444 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java @@ -51,6 +51,7 @@ import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.core.I18nUtil; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; @@ -233,7 +234,7 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec localize(labels, MSG_FEED_TITLE) : defaultTitle); if (defaultDescriptionField == null || defaultDescriptionField == "") { - defaultDescriptionField = "No Description"; + defaultDescriptionField = I18nUtil.getMessage("org.dspace.app.util.SyndicationFeed.no-description"); } feed.setDescription(defaultDescriptionField); diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index c478e4e69b2e..9be443f5eaec 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -51,6 +51,7 @@ metadata.bitstream.iiif-virtual.bytes = File size metadata.bitstream.iiif-virtual.checksum = Checksum org.dspace.app.itemexport.no-result = The DSpaceObject that you specified has no items. 
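The SyndicationFeed change above replaces the hard-coded "No Description" fallback with a message looked up through I18nUtil, using the key added to Messages.properties just below. The following is a minimal sketch of that fallback pattern only, written against a plain java.util.ResourceBundle rather than DSpace's I18nUtil; the class name and the "Messages" bundle name are illustrative assumptions, not the actual DSpace wiring.

import java.util.Locale;
import java.util.ResourceBundle;

public class FeedDescriptionFallbackSketch {

    // Return the configured description, or a localized default when it is blank.
    static String resolveDescription(String configured, ResourceBundle messages) {
        if (configured == null || configured.isEmpty()) {
            // Key assumed present in the bundle, mirroring the property added below
            return messages.getString("org.dspace.app.util.SyndicationFeed.no-description");
        }
        return configured;
    }

    public static void main(String[] args) {
        // Requires a Messages.properties (or a locale variant) on the classpath
        ResourceBundle messages = ResourceBundle.getBundle("Messages", Locale.ENGLISH);
        System.out.println(resolveDescription(null, messages));
    }
}

Resolving the key at render time keeps the default description translatable by editing the message catalogs alone, with no further code changes.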
+org.dspace.app.util.SyndicationFeed.no-description = No Description org.dspace.checker.ResultsLogger.bitstream-format = Bitstream format org.dspace.checker.ResultsLogger.bitstream-found = Bitstream found org.dspace.checker.ResultsLogger.bitstream-id = Bitstream ID From 80af0665c12b3707351d45db962975657ebfe7c9 Mon Sep 17 00:00:00 2001 From: nwoodward Date: Mon, 1 May 2023 15:15:28 -0500 Subject: [PATCH 144/686] send "forgot password" email if user tries to register with email address already in eperson table --- .../RegistrationRestRepository.java | 36 ++++++++++++------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java index ac44ccb4c274..c3c94a4d0bf0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java @@ -138,21 +138,31 @@ public RegistrationRest createAndReturn(Context context) { + registrationRest.getEmail(), e); } } else if (accountType.equalsIgnoreCase(TYPE_REGISTER)) { - try { - String email = registrationRest.getEmail(); - if (!AuthorizeUtil.authorizeNewAccountRegistration(context, request)) { - throw new AccessDeniedException( - "Registration is disabled, you are not authorized to create a new Authorization"); + if (eperson == null) { + try { + String email = registrationRest.getEmail(); + if (!AuthorizeUtil.authorizeNewAccountRegistration(context, request)) { + throw new AccessDeniedException( + "Registration is disabled, you are not authorized to create a new Authorization"); + } + if (!authenticationService.canSelfRegister(context, request, email)) { + throw new UnprocessableEntityException( + String.format("Registration is not allowed with email address" + + " %s", email)); + } + accountService.sendRegistrationInfo(context, email); + } catch (SQLException | IOException | MessagingException | AuthorizeException e) { + log.error("Something went wrong with sending registration info email: " + + registrationRest.getEmail(), e); } - if (!authenticationService.canSelfRegister(context, request, email)) { - throw new UnprocessableEntityException( - String.format("Registration is not allowed with email address" + - " %s", email)); + } else { + // if an eperson with this email already exists then send "forgot password" email instead + try { + accountService.sendForgotPasswordInfo(context, registrationRest.getEmail()); + } catch (SQLException | IOException | MessagingException | AuthorizeException e) { + log.error("Something went wrong with sending forgot password info email: " + + registrationRest.getEmail(), e); } - accountService.sendRegistrationInfo(context, email); - } catch (SQLException | IOException | MessagingException | AuthorizeException e) { - log.error("Something went wrong with sending registration info email: " - + registrationRest.getEmail(), e); } } return null; From ef138cf76d4ef42df57fb90454667030f181842f Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Tue, 2 May 2023 09:19:32 +0200 Subject: [PATCH 145/686] [CST-6402] fix saf import via remote url: the source param is not used --- .../src/main/java/org/dspace/app/itemimport/ItemImportCLI.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java 
b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java index 08ecdd097c16..1a71a8c4c09e 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -66,7 +66,7 @@ protected void validate(Context context) { handler.logError("Must run with either add, replace, or remove (run with -h flag for details)"); throw new UnsupportedOperationException("Must run with either add, replace, or remove"); } else if ("add".equals(command) || "replace".equals(command)) { - if (sourcedir == null) { + if (!remoteUrl && sourcedir == null) { handler.logError("A source directory containing items must be set (run with -h flag for details)"); throw new UnsupportedOperationException("A source directory containing items must be set"); } From 603cd9a19b7ad23a4b1d4923370c8508bbea75fc Mon Sep 17 00:00:00 2001 From: Ma-Tador <86478723+Ma-Tador@users.noreply.github.com> Date: Tue, 2 May 2023 12:11:44 +0200 Subject: [PATCH 146/686] Remove white spaces to comply with DSpace checkstyle --- .../app/rest/repository/RegistrationRestRepository.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java index 2cc14c1ff705..98f129b4bf9d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java @@ -87,7 +87,7 @@ public RegistrationRest findOne(Context context, Integer integer) { public Page findAll(Context context, Pageable pageable) { throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); } - + @Override public RegistrationRest createAndReturn(Context context) { HttpServletRequest request = requestService.getCurrentRequest().getHttpServletRequest(); @@ -136,7 +136,7 @@ public RegistrationRest createAndReturn(Context context) { log.error("Something went wrong with sending forgot password info email: " + registrationRest.getEmail(), e); } - } else if (accountType.equalsIgnoreCase(TYPE_REGISTER)) { + } else if (accountType.equalsIgnoreCase(TYPE_REGISTER)) { try { String email = registrationRest.getEmail(); if (!AuthorizeUtil.authorizeNewAccountRegistration(context, request)) { @@ -157,7 +157,6 @@ public RegistrationRest createAndReturn(Context context) { return null; } - @Override public Class getDomainClass() { return RegistrationRest.class; From 518cdb7ff0b6d519f56f45cfa50fd49b61ce9c42 Mon Sep 17 00:00:00 2001 From: Mohamed Saber Eskander Date: Tue, 2 May 2023 14:42:33 +0300 Subject: [PATCH 147/686] [DURACOM-126] solved Submission with a validation error on a hidden step can't be submitted --- .../dspace/app/util/SubmissionStepConfig.java | 29 +++++ .../dspaceFolder/config/item-submission.xml | 37 ++++++- .../dspaceFolder/config/submission-forms.xml | 29 +++++ .../converter/AInprogressItemConverter.java | 4 + .../SubmissionDefinitionsControllerIT.java | 100 +++++++++++------- .../app/rest/SubmissionFormsControllerIT.java | 66 ++++++++---- .../rest/WorkflowItemRestRepositoryIT.java | 31 ++++++ .../rest/WorkspaceItemRestRepositoryIT.java | 29 +++++ dspace/config/item-submission.xml | 1 - 9 files changed, 266 insertions(+), 60 deletions(-) diff --git 
a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java index 5506b3c23f1e..1a8f2744b818 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java @@ -11,6 +11,9 @@ import java.util.Map; import org.apache.commons.lang3.BooleanUtils; +import org.dspace.content.InProgressSubmission; +import org.dspace.content.WorkspaceItem; +import org.hibernate.proxy.HibernateProxyHelper; /** * Class representing configuration for a single step within an Item Submission @@ -173,6 +176,32 @@ public String getVisibilityOutside() { return visibilityOutside; } + public boolean isHiddenForInProgressSubmission(InProgressSubmission obj) { + + String scopeToCheck = getScope(obj); + + if (scope == null || scopeToCheck == null) { + return false; + } + + String visibility = getVisibility(); + String visibilityOutside = getVisibilityOutside(); + + if (scope.equalsIgnoreCase(scopeToCheck)) { + return "hidden".equalsIgnoreCase(visibility); + } else { + return visibilityOutside == null || "hidden".equalsIgnoreCase(visibilityOutside); + } + + } + + private String getScope(InProgressSubmission obj) { + if (HibernateProxyHelper.getClassWithoutInitializingProxy(obj).equals(WorkspaceItem.class)) { + return "submission"; + } + return "workflow"; + } + /** * Get the number of this step in the current Submission process config. * Step numbers start with #0 (although step #0 is ALWAYS the special diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index 6d8ae0c2f0d9..452460501a54 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ -23,6 +23,7 @@ + @@ -54,7 +55,6 @@ org.dspace.app.rest.submit.step.CollectionStep collection - submission submit.progressbar.describe.stepone @@ -149,6 +149,34 @@ org.dspace.app.rest.submit.step.ShowIdentifiersStep identifiers + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + workflow + + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + submission + + + + + org.dspace.app.rest.submit.step.CollectionStep + collection + + + + + org.dspace.app.rest.submit.step.CollectionStep + collection + submission + + @@ -222,6 +250,13 @@ + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 7438fda85212..6b7349616e2d 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -436,6 +436,35 @@ it, please enter the types and the actual numbers or codes. + +

+ + + dc + title + + false + + onebox + Field required + + + + +
+ + + dc + type + + false + + onebox + Field required + + +
+ diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java index ce7ca349180d..fa1d145011f7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java @@ -86,6 +86,10 @@ protected void fillFromModel(T obj, R witem, Projection projection) { for (SubmissionSectionRest sections : def.getPanels()) { SubmissionStepConfig stepConfig = submissionSectionConverter.toModel(sections); + if (stepConfig.isHiddenForInProgressSubmission(obj)) { + continue; + } + /* * First, load the step processing class (using the current * class loader) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java index e7d43ec4d620..babb1fac2326 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java @@ -257,10 +257,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", is(6))) + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -268,7 +268,7 @@ public void findAllPaginationTest() throws Exception { .param("page", "1")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("accessConditionNotDiscoverable"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("test-hidden"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=0"), Matchers.containsString("size=1")))) @@ -285,8 +285,8 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page="), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", is(6))) + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) .andExpect(jsonPath("$.page.number", is(1))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -294,30 +294,56 @@ public void findAllPaginationTest() throws Exception { .param("page", "2")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("languagetestprocess"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", 
is("accessConditionNotDiscoverable"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=0"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=0"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=1"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=3"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=3"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=2"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=2"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", is(6))) + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) .andExpect(jsonPath("$.page.number", is(2))); + getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") + .param("size", "1") + .param("page", "3")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("languagetestprocess"))) + .andExpect(jsonPath("$._links.first.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=0"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=2"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.next.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.self.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=3"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.last.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$.page.size", is(1))) + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) + .andExpect(jsonPath("$.page.number", is(3))); + 
getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") .param("size", "1") - .param("page", "3")) + .param("page", "4")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("qualdroptest"))) @@ -326,24 +352,24 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=0"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=2"), Matchers.containsString("size=1")))) + Matchers.containsString("page=3"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.next.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + Matchers.containsString("page=5"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=3"), Matchers.containsString("size=1")))) + Matchers.containsString("page=4"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", is(6))) - .andExpect(jsonPath("$.page.number", is(3))); + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) + .andExpect(jsonPath("$.page.number", is(4))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") .param("size", "1") - .param("page", "4")) + .param("page", "5")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("extractiontestprocess"))) @@ -352,20 +378,20 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=0"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=3"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.next.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + Matchers.containsString("page=5"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) 
.andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", is(6))) - .andExpect(jsonPath("$.page.number", is(4))); + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) + .andExpect(jsonPath("$.page.number", is(5))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java index 241bdefe2108..cf1e0c7c76a9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java @@ -67,13 +67,13 @@ public void findAll() throws Exception { .andExpect(content().contentType(contentType)) //The configuration file for the test env includes 6 forms .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) .andExpect(jsonPath("$.page.totalPages", equalTo(1))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect( jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms"))) //The array of submissionforms should have a size of 8 - .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(8)))) + .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(10)))) ; } @@ -84,12 +84,12 @@ public void findAllWithNewlyCreatedAccountTest() throws Exception { .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) .andExpect(jsonPath("$.page.totalPages", equalTo(1))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect(jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms"))) - .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(8)))); + .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(10)))); } @Test @@ -696,10 +696,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=1"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) - .andExpect(jsonPath("$.page.totalPages", equalTo(4))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") @@ -707,8 +707,8 @@ public void findAllPaginationTest() throws Exception { .param("page", "1")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("languagetest"))) - .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("qualdroptest"))) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("test-outside-workflow-hidden"))) + .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("languagetest"))) 
.andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=0"), Matchers.containsString("size=2")))) @@ -723,10 +723,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=2"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) - .andExpect(jsonPath("$.page.totalPages", equalTo(4))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) .andExpect(jsonPath("$.page.number", is(1))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") @@ -734,8 +734,8 @@ public void findAllPaginationTest() throws Exception { .param("page", "2")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpagetwo"))) - .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("sampleauthority"))) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("test-outside-submission-hidden"))) + .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("qualdroptest"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=0"), Matchers.containsString("size=2")))) @@ -747,10 +747,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=2"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) - .andExpect(jsonPath("$.page.totalPages", equalTo(4))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) .andExpect(jsonPath("$.page.number", is(2))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") @@ -758,7 +758,8 @@ public void findAllPaginationTest() throws Exception { .param("page", "3")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpageone"))) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpagetwo"))) + .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("sampleauthority"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=0"), Matchers.containsString("size=2")))) @@ -770,10 +771,33 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=3"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", 
is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) - .andExpect(jsonPath("$.page.totalPages", equalTo(4))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) .andExpect(jsonPath("$.page.number", is(3))); + + getClient(tokenAdmin).perform(get("/api/config/submissionforms") + .param("size", "2") + .param("page", "4")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpageone"))) + .andExpect(jsonPath("$._links.first.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=0"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.self.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.last.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) + .andExpect(jsonPath("$.page.number", is(4))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkflowItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkflowItemRestRepositoryIT.java index c43821d4a013..72612fc5eb53 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkflowItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkflowItemRestRepositoryIT.java @@ -2122,4 +2122,35 @@ public void whenWorkspaceitemBecomeWorkflowitemWithAccessConditionsTheItemMustBe WorkflowItemBuilder.deleteWorkflowItem(idRef.get()); } } + + @Test + public void testWorkflowWithHiddenSections() throws Exception { + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, parentCommunity, "123456789/test-hidden") + .withName("Collection 1") + .withWorkflowGroup(1, eperson) + .build(); + + XmlWorkflowItem workflowItem = WorkflowItemBuilder.createWorkflowItem(context, collection) + .withTitle("Workflow Item") + .build(); + + context.restoreAuthSystemState(); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(get("/api/workflow/workflowitems/" + workflowItem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sections.test-outside-workflow-hidden").exists()) + .andExpect(jsonPath("$.sections.test-outside-submission-hidden").doesNotExist()) + .andExpect(jsonPath("$.sections.test-never-hidden").exists()) + .andExpect(jsonPath("$.sections.test-always-hidden").doesNotExist()); + + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java index 6c975264257a..ad9b8046f25a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java @@ -8566,4 +8566,33 @@ public void patchBySupervisorTest() throws Exception { ))); } + @Test + public void testSubmissionWithHiddenSections() throws Exception { + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, parentCommunity, "123456789/test-hidden") + .withName("Collection 1") + .build(); + + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .withIssueDate("2023-01-01") + .build(); + + context.restoreAuthSystemState(); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(get("/api/submission/workspaceitems/" + workspaceItem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sections.test-outside-workflow-hidden").doesNotExist()) + .andExpect(jsonPath("$.sections.test-outside-submission-hidden").exists()) + .andExpect(jsonPath("$.sections.test-never-hidden").exists()) + .andExpect(jsonPath("$.sections.test-always-hidden").doesNotExist()); + + } } diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index 2ab26dcf57df..9fbcb776d15b 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -79,7 +79,6 @@ org.dspace.app.rest.submit.step.CollectionStep collection - submission diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 35206d6ee291..9badeb2fe80a 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -22,38 +22,6 @@ ${basedir}/.. - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - postgres-support - - - !db.name - - - - - org.postgresql - postgresql - - - - - javax.servlet diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 8f5a6f84f492..dd98bf0cbdd1 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -61,22 +61,6 @@
- - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - unit-test-environment diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index a7c9b5922c62..115393b7db20 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -90,24 +90,6 @@ - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index 9b696fa0cbda..41ddb94be5a9 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -244,22 +244,6 @@ just adding new jar in the classloader - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - diff --git a/pom.xml b/pom.xml index 3b6f7edb6549..fba97314a5ae 100644 --- a/pom.xml +++ b/pom.xml @@ -1617,11 +1617,6 @@ icu4j 62.1 - - com.oracle - ojdbc6 - 11.2.0.4.0 - org.dspace From 6fbf97a6b39502f440ac1b6be2094ee40871d7d7 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 2 May 2023 11:57:47 -0500 Subject: [PATCH 152/686] Remove oracle mentions from configs / READMEs --- .../storage/rdbms/sqlmigration/h2/README.md | 28 +++++++------------ .../rdbms/sqlmigration/postgres/README.md | 7 +++-- dspace/config/dspace.cfg | 18 +++--------- dspace/config/local.cfg.EXAMPLE | 18 +++--------- 4 files changed, 22 insertions(+), 49 deletions(-) diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md index 8088c6ccca62..87e114ca53a5 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md @@ -4,33 +4,25 @@ in Production. Instead, DSpace uses the H2 Database to perform Unit Testing during development. -By default, the DSpace Unit Testing environment configures H2 to run in -"Oracle Mode" and initializes the H2 database using the scripts in this directory. -These database migrations are automatically called by [Flyway](http://flywaydb.org/) -when the `DatabaseManager` initializes itself (see `initializeDatabase()` method). - -The H2 migrations in this directory are *based on* the Oracle Migrations, but -with some modifications in order to be valid in H2. - -## Oracle vs H2 script differences +By default, the DSpace Unit Testing environment configures H2 to run in memory +and initializes the H2 database using the scripts in this directory. See +`[src]/dspace-api/src/test/data/dspaceFolder/config/local.cfg`. -One of the primary differences between the Oracle scripts and these H2 ones -is in the syntax of the `ALTER TABLE` command. Unfortunately, H2's syntax for -that command differs greatly from Oracle (and PostgreSQL as well). +These database migrations are automatically called by [Flyway](http://flywaydb.org/) +in `DatabaseUtils`. -Most of the remainder of the scripts contain the exact Oracle syntax (which is -usually valid in H2). But, to you can always `diff` scripts of the same name -for further syntax differences. +The H2 migrations in this directory all use H2's grammar/syntax. +For additional info see the [H2 SQL Grammar](https://www.h2database.com/html/grammar.html). -For additional info see the [H2 SQL Grammar](http://www.h2database.com/html/grammar.html). ## More Information on Flyway The SQL scripts in this directory are H2-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). 
As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md index 72eb279912b5..e16e4c6d4c91 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md @@ -3,8 +3,9 @@ The SQL scripts in this directory are PostgreSQL-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. @@ -22,7 +23,7 @@ Please see the Flyway Documentation for more information: http://flywaydb.org/ The `update-sequences.sql` script in this directory may still be used to update your internal database counts if you feel they have gotten out of "sync". This may sometimes occur after large restores of content (e.g. when using the DSpace -[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore) +[AIP Backup and Restore](https://wiki.lyrasis.org/display/DSDOC7x/AIP+Backup+and+Restore) feature). This `update-sequences.sql` script can be executed by running diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 65b1f951fad3..aee08c8e6009 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -74,23 +74,15 @@ solr.multicorePrefix = # solr.client.timeToLive = 600 ##### Database settings ##### -# DSpace only supports two database types: PostgreSQL or Oracle -# PostgreSQL is highly recommended. -# Oracle support is DEPRECATED. See https://github.com/DSpace/DSpace/issues/8214 +# DSpace ONLY supports PostgreSQL at this time. 
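The README hunks above describe how Flyway applies these SQL migrations automatically when DSpace's DatabaseUtils initializes: it inspects its schema history table, works out which migrations the database already has, and runs only the missing ones. As a rough standalone sketch of that behaviour (not DSpace's actual wiring — the connection settings and the migration location below are placeholders), the Flyway API is typically driven like this:

import org.flywaydb.core.Flyway;

public class FlywayMigrationSketch {

    public static void main(String[] args) {
        // Placeholder connection settings; DSpace takes these from the db.* keys in local.cfg
        Flyway flyway = Flyway.configure()
                .dataSource("jdbc:postgresql://localhost:5432/dspace", "dspace", "dspace")
                // Placeholder location; DSpace resolves its own classpath locations,
                // e.g. the sqlmigration/postgres scripts this README sits next to
                .locations("classpath:org/dspace/storage/rdbms/sqlmigration/postgres")
                .load();

        // Compares the flyway_schema_history table against the available scripts
        // and applies only the migrations that have not been run yet
        flyway.migrate();
    }
}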
# URL for connecting to database -# * Postgres template: jdbc:postgresql://localhost:5432/dspace -# * Oracle template (DEPRECATED): jdbc:oracle:thin:@//localhost:1521/xe db.url = jdbc:postgresql://localhost:5432/dspace -# JDBC Driver -# * For Postgres: org.postgresql.Driver -# * For Oracle (DEPRECATED): oracle.jdbc.OracleDriver +# JDBC Driver for PostgreSQL db.driver = org.postgresql.Driver -# Database Dialect (for Hibernate) -# * For Postgres: org.hibernate.dialect.PostgreSQL94Dialect -# * For Oracle (DEPRECATED): org.hibernate.dialect.Oracle10gDialect +# PostgreSQL Database Dialect (for Hibernate) db.dialect = org.hibernate.dialect.PostgreSQL94Dialect # Database username and password @@ -98,9 +90,7 @@ db.username = dspace db.password = dspace # Database Schema name -# * For Postgres, this is often "public" (default schema) -# * For Oracle (DEPRECATED), schema is equivalent to the username of your database account, -# so this may be set to ${db.username} in most scenarios. +# For PostgreSQL, this is often "public" (default schema) db.schema = public ## Database Connection pool parameters diff --git a/dspace/config/local.cfg.EXAMPLE b/dspace/config/local.cfg.EXAMPLE index cf13a47d762c..7176ed275a51 100644 --- a/dspace/config/local.cfg.EXAMPLE +++ b/dspace/config/local.cfg.EXAMPLE @@ -71,23 +71,15 @@ dspace.name = DSpace at My University ########################## # DATABASE CONFIGURATION # ########################## -# DSpace only supports two database types: PostgreSQL or Oracle -# PostgreSQL is highly recommended. -# Oracle support is DEPRECATED. See https://github.com/DSpace/DSpace/issues/8214 +# DSpace ONLY supports PostgreSQL at this time. # URL for connecting to database -# * Postgres template: jdbc:postgresql://localhost:5432/dspace -# * Oracle template (DEPRECATED): jdbc:oracle:thin:@//localhost:1521/xe db.url = jdbc:postgresql://localhost:5432/dspace -# JDBC Driver -# * For Postgres: org.postgresql.Driver -# * For Oracle (DEPRECATED): oracle.jdbc.OracleDriver +# JDBC Driver for PostgreSQL db.driver = org.postgresql.Driver -# Database Dialect (for Hibernate) -# * For Postgres: org.hibernate.dialect.PostgreSQL94Dialect -# * For Oracle (DEPRECATED): org.hibernate.dialect.Oracle10gDialect +# PostgreSQL Database Dialect (for Hibernate) db.dialect = org.hibernate.dialect.PostgreSQL94Dialect # Database username and password @@ -95,9 +87,7 @@ db.username = dspace db.password = dspace # Database Schema name -# * For Postgres, this is often "public" (default schema) -# * For Oracle (DEPRECATED), schema is equivalent to the username of your database account, -# so this may be set to ${db.username} in most scenarios. 
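The db.url, db.driver, db.username and db.password keys shown in these hunks are ordinary JDBC settings. A minimal connectivity check using the PostgreSQL defaults above (purely illustrative, not part of the patch; it assumes the PostgreSQL JDBC driver is on the classpath):

import java.sql.Connection;
import java.sql.DriverManager;

public class PostgresConnectionCheckSketch {

    public static void main(String[] args) throws Exception {
        // Same defaults as the db.url / db.username / db.password lines above
        String url = "jdbc:postgresql://localhost:5432/dspace";
        try (Connection conn = DriverManager.getConnection(url, "dspace", "dspace")) {
            // A trivial round trip confirms that the driver and credentials work
            System.out.println("Connected to: " + conn.getMetaData().getDatabaseProductName());
        }
    }
}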
+# For PostgreSQL, this is often "public" (default schema) db.schema = public ## Connection pool parameters From 140cdc6de07a2263857372337257c56b8c4296fe Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 2 May 2023 12:09:30 -0500 Subject: [PATCH 153/686] Remove all oracle-specific migration scripts --- .../oracle/upgradeToFlyway4x.sql | 29 - ...1__CollectionCommunity_Metadata_Handle.sql | 90 --- .../rdbms/sqlmigration/oracle/README.md | 84 --- ...tial_DSpace_1.2_Oracle_database_schema.sql | 550 ------------------ .../V1.3__Upgrade_to_DSpace_1.3_schema.sql | 57 -- ...V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql | 133 ----- .../V1.4__Upgrade_to_DSpace_1.4_schema.sql | 371 ------------ .../V1.5__Upgrade_to_DSpace_1.5_schema.sql | 142 ----- .../V1.6__Upgrade_to_DSpace_1.6_schema.sql | 93 --- .../V1.7__Upgrade_to_DSpace_1.7_schema.sql | 20 - .../V1.8__Upgrade_to_DSpace_1.8_schema.sql | 23 - .../V3.0__Upgrade_to_DSpace_3.x_schema.sql | 52 -- .../V4.0__Upgrade_to_DSpace_4.x_schema.sql | 88 --- ....9_2015.10.26__DS-2818_registry_update.sql | 64 -- ...08.08__DS-1945_Helpdesk_Request_a_Copy.sql | 20 - ...9.26__DS-1582_Metadata_For_All_Objects.sql | 333 ----------- .../oracle/V5.6_2016.08.23__DS-3097.sql | 24 - ..._metadatavalue_resource_type_id_column.sql | 23 - ...015.03.07__DS-2701_Hibernate_migration.sql | 469 --------------- ..._03_06_01__DS_3378_lost_oracle_indexes.sql | 18 - .../oracle/V6.0_2016.01.03__DS-3024.sql | 25 - ...02.25__DS-3004-slow-searching-as-admin.sql | 30 - ...04.01__DS-1955_Increase_embargo_reason.sql | 25 - ...016.04.04__DS-3086-OAI-Performance-fix.sql | 46 -- ...125-fix-bundle-bitstream-delete-rights.sql | 33 -- ...DS-3168-fix-requestitem_item_id_column.sql | 24 - .../oracle/V6.0_2016.07.21__DS-2775.sql | 30 - ...6.07.26__DS-3277_fix_handle_assignment.sql | 44 -- .../oracle/V6.0_2016.08.23__DS-3097.sql | 24 - .../V6.0_2016.11.29__DS-3410-lost-indexes.sql | 17 - .../oracle/V6.0_2016.11.30__DS-3409.sql | 16 - ...2017.10.12__DS-3542-stateless-sessions.sql | 20 - .../V7.0_2018.04.16__dspace-entities.sql | 65 --- .../V7.0_2018.06.07__DS-3851-permission.sql | 24 - ....05.02__DS-4239-workflow-xml-migration.sql | 17 - ..._2019.07.31__Retrieval_of_name_variant.sql | 18 - ....13__relationship_type_copy_left_right.sql | 14 - .../V7.0_2019_06_14__scripts-and-process.sql | 40 -- ..._2020.01.08__DS-626-statistics-tracker.sql | 29 - ...V7.0_2021.01.22__Remove_basic_workflow.sql | 17 - .../oracle/V7.0_2021.02.08__tilted_rels.sql | 13 - ....18__Move_entity_type_to_dspace_schema.sql | 56 -- ..._type_from_item_template_to_collection.sql | 28 - ...ction_table_drop_workflow_stem_columns.sql | 15 - ...DV_place_after_migrating_from_DSpace_5.sql | 24 - ....3_2022.04.29__orcid_queue_and_history.sql | 54 -- .../V7.3_2022.05.16__Orcid_token_table.sql | 24 - .../V7.3_2022.06.16__process_to_group.sql | 18 - ...on_status_column_to_relationship_table.sql | 10 - ...mter_change_columns_subscription_table.sql | 45 -- ...5_2022.12.09__Supervision_Orders_table.sql | 78 --- .../V7.5_2022.12.15__system_wide_alerts.sql | 22 - ...pdate_PNG_in_bitstream_format_registry.sql | 17 - ..._and_history_descriptions_to_text_type.sql | 10 - .../sqlmigration/oracle/update-sequences.sql | 77 --- .../V5.7_2017.05.05__DS-3431.sql | 503 ---------------- ...8.11__DS-2701_Basic_Workflow_Migration.sql | 37 -- .../V6.1_2017.01.03__DS-3431.sql | 503 ---------------- ....08.11__DS-2701_Xml_Workflow_Migration.sql | 141 ----- ....0_2018.04.03__upgrade_workflow_policy.sql | 27 - .../oracle/data_workflow_migration.sql | 377 ------------ 
.../v6.0__DS-2701_data_workflow_migration.sql | 377 ------------ .../v6.0__DS-2701_xml_workflow_migration.sql | 124 ---- .../oracle/xml_workflow_migration.sql | 124 ---- 64 files changed, 5945 deletions(-) delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql delete mode 100644 
dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql delete mode 100644 
dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql deleted file mode 100644 index 7907fccc00ae..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- Copyright 2010-2017 Boxfuse GmbH --- --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at --- --- http://www.apache.org/licenses/LICENSE-2.0 --- --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. 
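Every file being deleted above is a Flyway migration: as the warning banner inside each of these scripts notes, they are applied automatically at startup and recorded in Flyway's schema history table, never run by hand. As a rough, read-only sketch of that bookkeeping on PostgreSQL (the database that remains once this patch lands), the query below lists what Flyway has already applied; the table name is assumed to be the Flyway 4.x default, schema_version, and newer Flyway releases call it flyway_schema_history instead, so adjust the name for your installation.

-- Read-only look at Flyway's bookkeeping for these migrations.
-- "schema_version" is the Flyway 4.x default history table (an assumption here);
-- Flyway 5+ installations name it flyway_schema_history instead.
SELECT installed_rank,
       version,
       description,
       type,
       success
  FROM schema_version
 ORDER BY installed_rank;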
--- ------------------ --- This is the Oracle upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql --- --- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------- - -DROP INDEX "${schema}"."${table}_vr_idx"; -DROP INDEX "${schema}"."${table}_ir_idx"; -ALTER TABLE "${schema}"."${table}" DROP COLUMN "version_rank"; -ALTER TABLE "${schema}"."${table}" DROP PRIMARY KEY DROP INDEX; -ALTER TABLE "${schema}"."${table}" MODIFY "version" NULL; -ALTER TABLE "${schema}"."${table}" ADD CONSTRAINT "${table}_pk" PRIMARY KEY ("installed_rank"); -UPDATE "${schema}"."${table}" SET "type"='BASELINE' WHERE "type"='INIT'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql deleted file mode 100644 index fff1fe154f57..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql +++ /dev/null @@ -1,90 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create COMMUNITY handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - -------------------------------------------------------------- --- This will create COLLECTION handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md deleted file mode 100644 index 
6cef123859ca..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# Oracle Flyway Database Migrations (i.e. Upgrades) - ---- -WARNING: Oracle Support is deprecated. -See https://github.com/DSpace/DSpace/issues/8214 ---- - -The SQL scripts in this directory are Oracle-specific database migrations. They are -used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). -As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using -and then executes the appropriate upgrade script(s) to bring it up to the latest -version. - -If any failures occur, Flyway will "rollback" the upgrade script which resulted -in an error and log the issue in the DSpace log file at `[dspace]/log/dspace.log.[date]` - -**WARNING:** IT IS NOT RECOMMENDED TO RUN THESE SCRIPTS MANUALLY. If you do so, -Flyway will may throw failures the next time you startup DSpace, as Flyway will -not realize you manually ran one or more scripts. - -Please see the Flyway Documentation for more information: http://flywaydb.org/ - -## Oracle Porting Notes for the Curious - -Oracle is missing quite a number of cool features found in Postgres, so -workarounds had to be found, most of which are hidden behind tests in -DatabaseManager. If Oracle is your DBMS, the workarounds are activated: - -Oracle doesn't like ';' characters in JDBC SQL - they have all been removed -from the DSpace source, including code in the .sql file reader to strip ;'s. - -browse code - LIMIT and OFFSET is used to limit browse results, and an -Oracle-hack is used to limit the result set to a given size - -Oracle has no boolean data type, so a new schema file was created that -uses NUMBER(1) (AKA 'integers') and code is inserted everywhere to use 0 for -false and 1 for true if DSpace is using Oracle. - -Oracle doesn't have a TEXT data type either, so TEXT columns are defined -as VARCHAR2 in the Oracle-specific schema. - -Oracle doesn't allow dynamic naming for objects, so our cute trick to -derive the name of the sequence by appending _seq to the table name -in a function doesn't work in Oracle - workaround is to insert Oracle -code to generate the name of the sequence and then place that into -our SQL calls to generate a new ID. - -Oracle doesn't let you directly set the value of sequences, so -update-sequences.sql is forced to use a special script sequpdate.sql -to update the sequences. - -Bitstream had a column 'size' which is a reserved word in Oracle, -so this had to be changed to 'size_bytes' with corresponding code changes. - -VARCHAR2 has a limit of 4000 characters, so DSpace text data is limited to 4k. -Going to the CLOB data type can get around that, but seemed like too much effort -for now. Note that with UTF-8 encoding that 4k could translate to 1300 -characters worst-case (every character taking up 3 bytes is the worst case -scenario.) - -### UPDATE 5 April 2007 - -CLOBs are now used as follows: -MetadataValue:text_value -Community:introductory_text -Community:copyright_text -Collection:introductory_text -Collection:license -Collection:copyright_text - -DatabaseManager had to have some of the type checking changed, because Oracle's -JDBC driver is reporting INTEGERS as type DECIMAL. 
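A concrete illustration of the boolean workaround described in these porting notes, using a made-up table name (demo_item_flags) that simply echoes the in_archive/withdrawn flags of the Item table defined in the 1.2 schema further below. This is Oracle-flavoured SQL for illustration only; the PostgreSQL schema declares the same columns as BOOL.

-- Oracle has no BOOLEAN column type, so flags become NUMBER(1) holding 0 (false) or 1 (true).
-- demo_item_flags is a throwaway example table, not part of any DSpace schema.
CREATE TABLE demo_item_flags
(
  item_id    INTEGER PRIMARY KEY,
  in_archive NUMBER(1),   -- 0 = false, 1 = true (BOOL on PostgreSQL)
  withdrawn  NUMBER(1)
);

-- Oracle queries must compare against 0/1 explicitly;
-- on PostgreSQL this would simply be WHERE in_archive AND NOT withdrawn.
SELECT item_id
  FROM demo_item_flags
 WHERE in_archive = 1
   AND withdrawn = 0;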
- -Oracle doesn't like it when you reference table names in lower case when -getting JDBC metadata for the tables, so they are converted in TableRow -to upper case. - -### UPDATE 27 November 2012 - -Oracle complains with ORA-01408 if you attempt to create an index on a column which -has already had the UNIQUE contraint added (such an index is implicit in maintaining the uniqueness -of the column). See [DS-1370](https://jira.duraspace.org/browse/DS-1370) for details. diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql deleted file mode 100644 index 157274e05d66..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql +++ /dev/null @@ -1,550 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE bitstreamformatregistry_seq; -CREATE SEQUENCE fileextension_seq; -CREATE SEQUENCE bitstream_seq; -CREATE SEQUENCE eperson_seq; --- start group sequence at 0, since Anonymous group = 0 -CREATE SEQUENCE epersongroup_seq MINVALUE 0 START WITH 0; -CREATE SEQUENCE item_seq; -CREATE SEQUENCE bundle_seq; -CREATE SEQUENCE item2bundle_seq; -CREATE SEQUENCE bundle2bitstream_seq; -CREATE SEQUENCE dctyperegistry_seq; -CREATE SEQUENCE dcvalue_seq; -CREATE SEQUENCE community_seq; -CREATE SEQUENCE collection_seq; -CREATE SEQUENCE community2community_seq; -CREATE SEQUENCE community2collection_seq; -CREATE SEQUENCE collection2item_seq; -CREATE SEQUENCE resourcepolicy_seq; -CREATE SEQUENCE epersongroup2eperson_seq; -CREATE SEQUENCE handle_seq; -CREATE SEQUENCE workspaceitem_seq; -CREATE SEQUENCE workflowitem_seq; -CREATE SEQUENCE tasklistitem_seq; -CREATE SEQUENCE registrationdata_seq; -CREATE SEQUENCE subscription_seq; -CREATE SEQUENCE history_seq; -CREATE SEQUENCE historystate_seq; -CREATE SEQUENCE communities2item_seq; -CREATE SEQUENCE itemsbyauthor_seq; -CREATE SEQUENCE itemsbytitle_seq; -CREATE SEQUENCE itemsbydate_seq; -CREATE SEQUENCE itemsbydateaccessioned_seq; - - -------------------------------------------------------- --- BitstreamFormatRegistry table -------------------------------------------------------- -CREATE TABLE BitstreamFormatRegistry -( - bitstream_format_id INTEGER PRIMARY KEY, - mimetype VARCHAR2(48), - short_description VARCHAR2(128) UNIQUE, - description VARCHAR2(2000), - support_level INTEGER, - -- Identifies internal types - internal NUMBER(1) -); - -------------------------------------------------------- --- FileExtension table -------------------------------------------------------- -CREATE TABLE FileExtension -( - file_extension_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - extension VARCHAR2(16) -); - -------------------------------------------------------- --- 
Bitstream table -------------------------------------------------------- -CREATE TABLE Bitstream -( - bitstream_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - name VARCHAR2(256), - size_bytes INTEGER, - checksum VARCHAR2(64), - checksum_algorithm VARCHAR2(32), - description VARCHAR2(2000), - user_format_description VARCHAR2(2000), - source VARCHAR2(256), - internal_id VARCHAR2(256), - deleted NUMBER(1), - store_number INTEGER, - sequence_id INTEGER -); - -------------------------------------------------------- --- EPerson table -------------------------------------------------------- -CREATE TABLE EPerson -( - eperson_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - password VARCHAR2(64), - firstname VARCHAR2(64), - lastname VARCHAR2(64), - can_log_in NUMBER(1), - require_certificate NUMBER(1), - self_registered NUMBER(1), - last_active TIMESTAMP, - sub_frequency INTEGER, - phone VARCHAR2(32) -); - -------------------------------------------------------- --- EPersonGroup table -------------------------------------------------------- -CREATE TABLE EPersonGroup -( - eperson_group_id INTEGER PRIMARY KEY, - name VARCHAR2(256) UNIQUE -); - -------------------------------------------------------- --- Item table -------------------------------------------------------- -CREATE TABLE Item -( - item_id INTEGER PRIMARY KEY, - submitter_id INTEGER REFERENCES EPerson(eperson_id), - in_archive NUMBER(1), - withdrawn NUMBER(1), - last_modified TIMESTAMP, - owning_collection INTEGER -); - -------------------------------------------------------- --- Bundle table -------------------------------------------------------- -CREATE TABLE Bundle -( - bundle_id INTEGER PRIMARY KEY, - mets_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - name VARCHAR2(16), -- ORIGINAL | THUMBNAIL | TEXT - primary_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - -------------------------------------------------------- --- Item2Bundle table -------------------------------------------------------- -CREATE TABLE Item2Bundle -( - id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - bundle_id INTEGER REFERENCES Bundle(bundle_id) -); - --- index by item_id -CREATE INDEX item2bundle_item_idx on Item2Bundle(item_id); - -------------------------------------------------------- --- Bundle2Bitstream table -------------------------------------------------------- -CREATE TABLE Bundle2Bitstream -( - id INTEGER PRIMARY KEY, - bundle_id INTEGER REFERENCES Bundle(bundle_id), - bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - --- index by bundle_id -CREATE INDEX bundle2bitstream_bundle_idx ON Bundle2Bitstream(bundle_id); - -------------------------------------------------------- --- DCTypeRegistry table -------------------------------------------------------- -CREATE TABLE DCTypeRegistry -( - dc_type_id INTEGER PRIMARY KEY, - element VARCHAR2(64), - qualifier VARCHAR2(64), - scope_note VARCHAR2(2000), - UNIQUE(element, qualifier) -); - -------------------------------------------------------- --- DCValue table -------------------------------------------------------- -CREATE TABLE DCValue -( - dc_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - dc_type_id INTEGER REFERENCES DCTypeRegistry(dc_type_id), - text_value VARCHAR2(2000), - text_lang VARCHAR2(24), - place INTEGER, - source_id INTEGER -); - --- An index for item_id - almost all access is based on --- instantiating the item object, which grabs all 
dcvalues --- related to that item -CREATE INDEX dcvalue_item_idx on DCValue(item_id); - -------------------------------------------------------- --- Community table -------------------------------------------------------- -CREATE TABLE Community -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128) UNIQUE, - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000) -); - -------------------------------------------------------- --- Collection table -------------------------------------------------------- -CREATE TABLE Collection -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license VARCHAR2(2000), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -------------------------------------------------------- --- Community2Community table -------------------------------------------------------- -CREATE TABLE Community2Community -( - id INTEGER PRIMARY KEY, - parent_comm_id INTEGER REFERENCES Community(community_id), - child_comm_id INTEGER REFERENCES Community(community_id) -); - -------------------------------------------------------- --- Community2Collection table -------------------------------------------------------- -CREATE TABLE Community2Collection -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - -------------------------------------------------------- --- Collection2Item table -------------------------------------------------------- -CREATE TABLE Collection2Item -( - id INTEGER PRIMARY KEY, - collection_id INTEGER REFERENCES Collection(collection_id), - item_id INTEGER REFERENCES Item(item_id) -); - --- index by collection_id -CREATE INDEX collection2item_collection_idx ON Collection2Item(collection_id); - -------------------------------------------------------- --- ResourcePolicy table -------------------------------------------------------- -CREATE TABLE ResourcePolicy -( - policy_id INTEGER PRIMARY KEY, - resource_type_id INTEGER, - resource_id INTEGER, - action_id INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - epersongroup_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - start_date DATE, - end_date DATE -); - --- index by resource_type,resource_id - all queries by --- authorization manager are select type=x, id=y, action=z -CREATE INDEX resourcepolicy_type_id_idx ON ResourcePolicy(resource_type_id,resource_id); - -------------------------------------------------------- --- EPersonGroup2EPerson table -------------------------------------------------------- -CREATE TABLE EPersonGroup2EPerson -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - eperson_id INTEGER REFERENCES EPerson(eperson_id) -); - --- Index by group ID (used heavily by AuthorizeManager) -CREATE INDEX epersongroup2eperson_group_idx on EPersonGroup2EPerson(eperson_group_id); - - 
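The comments on the two indexes just above describe the access pattern they exist for: the authorization manager asking whether some policy grants a given action on a given object to the current user or one of their groups. A minimal sketch of that lookup follows; the numeric codes (resource_type_id = 2 for an Item, action_id = 0 for READ) are DSpace's legacy constants and, like the ids 42 and 123, are placeholder assumptions rather than anything defined by this schema.

-- "May eperson 123 perform READ on item 42?" - the select type=x, id=y, action=z shape
-- that resourcepolicy_type_id_idx and epersongroup2eperson_group_idx are built to serve.
-- 2 (ITEM) and 0 (READ) are assumed legacy constant values; 42 and 123 are placeholders.
SELECT rp.policy_id
  FROM ResourcePolicy rp
  LEFT JOIN EPersonGroup2EPerson m
         ON m.eperson_group_id = rp.epersongroup_id
 WHERE rp.resource_type_id = 2
   AND rp.resource_id = 42
   AND rp.action_id = 0
   AND (rp.eperson_id = 123 OR m.eperson_id = 123);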
-------------------------------------------------------- --- Handle table -------------------------------------------------------- -CREATE TABLE Handle -( - handle_id INTEGER PRIMARY KEY, - handle VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER -); - -------------------------------------------------------- --- WorkspaceItem table -------------------------------------------------------- -CREATE TABLE WorkspaceItem -( - workspace_item_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), -- boolean - published_before NUMBER(1), - multiple_files NUMBER(1), - -- How for the user has got in the submit process - stage_reached INTEGER -); - -------------------------------------------------------- --- WorkflowItem table -------------------------------------------------------- -CREATE TABLE WorkflowItem -( - workflow_id INTEGER PRIMARY KEY, - item_id INTEGER UNIQUE REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - state INTEGER, - owner INTEGER REFERENCES EPerson(eperson_id), - - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI - -); - -------------------------------------------------------- --- TasklistItem table -------------------------------------------------------- -CREATE TABLE TasklistItem -( - tasklist_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - workflow_id INTEGER REFERENCES WorkflowItem(workflow_id) -); - - -------------------------------------------------------- --- RegistrationData table -------------------------------------------------------- -CREATE TABLE RegistrationData -( - registrationdata_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - token VARCHAR2(48), - expires TIMESTAMP -); - - -------------------------------------------------------- --- Subscription table -------------------------------------------------------- -CREATE TABLE Subscription -( - subscription_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - - -------------------------------------------------------- --- History table -------------------------------------------------------- -CREATE TABLE History -( - history_id INTEGER PRIMARY KEY, - -- When it was stored - creation_date TIMESTAMP, - -- A checksum to keep INTEGERizations from being stored more than once - checksum VARCHAR2(32) UNIQUE -); - -------------------------------------------------------- --- HistoryState table -------------------------------------------------------- -CREATE TABLE HistoryState -( - history_state_id INTEGER PRIMARY KEY, - object_id VARCHAR2(64) -); - ------------------------------------------------------------- --- Browse subsystem tables and views ------------------------------------------------------------- - -------------------------------------------------------- --- Communities2Item table -------------------------------------------------------- -CREATE TABLE Communities2Item -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - item_id INTEGER REFERENCES Item(item_id) -); - -------------------------------------------------------- --- 
Community2Item view ------------------------------------------------------- -CREATE VIEW Community2Item as -SELECT Community2Collection.community_id, Collection2Item.item_id -FROM Community2Collection, Collection2Item -WHERE Collection2Item.collection_id = Community2Collection.collection_id -; - -------------------------------------------------------- --- ItemsByAuthor table -------------------------------------------------------- -CREATE TABLE ItemsByAuthor -( - items_by_author_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - author VARCHAR2(2000), - sort_author VARCHAR2(2000) -); - --- index by sort_author, of course! -CREATE INDEX sort_author_idx on ItemsByAuthor(sort_author); - -------------------------------------------------------- --- CollectionItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CollectionItemsByAuthor as -SELECT Collection2Item.collection_id, ItemsByAuthor.* -FROM ItemsByAuthor, Collection2Item -WHERE ItemsByAuthor.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CommunityItemsByAuthor as -SELECT Communities2Item.community_id, ItemsByAuthor.* -FROM ItemsByAuthor, Communities2Item -WHERE ItemsByAuthor.item_id = Communities2Item.item_id -; - ----------------------------------------- --- ItemsByTitle table ----------------------------------------- -CREATE TABLE ItemsByTitle -( - items_by_title_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - title VARCHAR2(2000), - sort_title VARCHAR2(2000) -); - --- index by the sort_title -CREATE INDEX sort_title_idx on ItemsByTitle(sort_title); - - -------------------------------------------------------- --- CollectionItemsByTitle view -------------------------------------------------------- -CREATE VIEW CollectionItemsByTitle as -SELECT Collection2Item.collection_id, ItemsByTitle.* -FROM ItemsByTitle, Collection2Item -WHERE ItemsByTitle.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByTitle view -------------------------------------------------------- -CREATE VIEW CommunityItemsByTitle as -SELECT Communities2Item.community_id, ItemsByTitle.* -FROM ItemsByTitle, Communities2Item -WHERE ItemsByTitle.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDate table -------------------------------------------------------- -CREATE TABLE ItemsByDate -( - items_by_date_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_issued VARCHAR2(2000) -); - --- sort by date -CREATE INDEX date_issued_idx on ItemsByDate(date_issued); - -------------------------------------------------------- --- CollectionItemsByDate view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDate as -SELECT Collection2Item.collection_id, ItemsByDate.* -FROM ItemsByDate, Collection2Item -WHERE ItemsByDate.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByDate view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDate as -SELECT Communities2Item.community_id, ItemsByDate.* -FROM ItemsByDate, Communities2Item -WHERE ItemsByDate.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDateAccessioned table 
-------------------------------------------------------- -CREATE TABLE ItemsByDateAccessioned -( - items_by_date_accessioned_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_accessioned VARCHAR2(2000) -); - -------------------------------------------------------- --- CollectionItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDateAccession as -SELECT Collection2Item.collection_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Collection2Item -WHERE ItemsByDateAccessioned.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDateAccession as -SELECT Communities2Item.community_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Communities2Item -WHERE ItemsByDateAccessioned.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql deleted file mode 100644 index 37d7e115eb53..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql +++ /dev/null @@ -1,57 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE epersongroup2workspaceitem_seq; - -------------------------------------------------------------------------------- --- create the new EPersonGroup2WorkspaceItem table -------------------------------------------------------------------------------- - -CREATE TABLE EPersonGroup2WorkspaceItem -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - workspace_item_id INTEGER REFERENCES WorkspaceItem(workspace_item_id) -); - -------------------------------------------------------------------------------- --- modification to collection table to support being able to change the --- submitter and collection admin group names -------------------------------------------------------------------------------- -ALTER TABLE collection ADD submitter INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE collection ADD admin INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE eperson ADD netid VARCHAR2(64) UNIQUE; - -------------------------------------------------------------------------------- --- Additional indices for performance -------------------------------------------------------------------------------- - --- index by resource id and resource type id -CREATE INDEX handle_resource_id_type_idx ON handle(resource_id, resource_type_id); - --- Indexing browse tables update/re-index performance -CREATE INDEX Communities2Item_item_id_idx ON Communities2Item( item_id ); -CREATE INDEX ItemsByAuthor_item_id_idx ON ItemsByAuthor(item_id); -CREATE INDEX ItemsByTitle_item_id_idx ON ItemsByTitle(item_id); -CREATE INDEX ItemsByDate_item_id_idx ON ItemsByDate(item_id); -CREATE INDEX ItemsByDateAcc_item_id_idx ON ItemsByDateAccessioned(item_id); - --- Improve mapping tables -CREATE INDEX Com2Coll_community_id_idx ON Community2Collection(community_id); -CREATE INDEX Com2Coll_collection_id_idx ON Community2Collection(collection_id); -CREATE INDEX Coll2Item_item_id_idx ON Collection2Item( item_id ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql deleted file mode 100644 index a713ced8bbb2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql +++ /dev/null @@ -1,133 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
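The handle_resource_id_type_idx index added just above speeds up looking up an object's handle from its internal id; the reverse lookup already goes through the UNIQUE handle column. A small sketch of both lookups, with placeholder values; the type code 2 for an Item is an assumed legacy constant, and '123456789/42' is an invented handle.

-- Object -> handle: the (resource_id, resource_type_id) lookup the new index serves.
-- 2 (ITEM) is an assumed legacy constant; 42 and '123456789/42' are placeholders.
SELECT h.handle
  FROM Handle h
 WHERE h.resource_id = 42
   AND h.resource_type_id = 2;

-- Handle -> object: already covered by the UNIQUE constraint on the handle column.
SELECT h.resource_type_id, h.resource_id
  FROM Handle h
 WHERE h.handle = '123456789/42';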
--- http://flywaydb.org/ --- =============================================================== - ---------------------------------------- --- Update MetadataValue to include CLOB ---------------------------------------- - -CREATE TABLE MetadataValueTemp -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value CLOB, - text_lang VARCHAR(64), - place INTEGER -); - -INSERT INTO MetadataValueTemp -SELECT * FROM MetadataValue; - -DROP VIEW dcvalue; -DROP TABLE MetadataValue; -ALTER TABLE MetadataValueTemp RENAME TO MetadataValue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); - ------------------------------------- --- Update Community to include CLOBs ------------------------------------- - -CREATE TABLE CommunityTemp -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text CLOB, - side_bar_text VARCHAR2(2000) -); - -INSERT INTO CommunityTemp -SELECT * FROM Community; - -DROP TABLE Community CASCADE CONSTRAINTS; -ALTER TABLE CommunityTemp RENAME TO Community; - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_parent -FOREIGN KEY (parent_comm_id) -REFERENCES Community (community_id); - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_child -FOREIGN KEY (child_comm_id) -REFERENCES Community (community_id); - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -ALTER TABLE Communities2Item ADD CONSTRAINT fk_c2i_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -------------------------------------- --- Update Collection to include CLOBs -------------------------------------- - -CREATE TABLE CollectionTemp -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license CLOB, - copyright_text CLOB, - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - submitter INTEGER REFERENCES EPersonGroup( eperson_group_id ), - admin INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -INSERT INTO CollectionTemp -SELECT * FROM Collection; - -DROP TABLE Collection CASCADE CONSTRAINTS; -ALTER TABLE CollectionTemp RENAME TO Collection; - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Collection2Item ADD CONSTRAINT fk_c2i_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER 
TABLE WorkspaceItem ADD CONSTRAINT fk_wsi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkflowItem ADD CONSTRAINT fk_wfi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Subscription ADD CONSTRAINT fk_subs_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql deleted file mode 100644 index 54cf10067b91..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql +++ /dev/null @@ -1,371 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------- --- Sequences for Group within Group feature -------------------------------------------------------------------------------- -CREATE SEQUENCE group2group_seq; -CREATE SEQUENCE group2groupcache_seq; - ------------------------------------------------------- --- Group2Group table, records group membership in other groups ------------------------------------------------------- -CREATE TABLE Group2Group -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - ------------------------------------------------------- --- Group2GroupCache table, is the 'unwound' hierarchy in --- Group2Group. It explicitly names every parent child --- relationship, even with nested groups. 
For example, --- If Group2Group lists B is a child of A and C is a child of B, --- this table will have entries for parent(A,B), and parent(B,C) --- AND parent(A,C) so that all of the child groups of A can be --- looked up in a single simple query ------------------------------------------------------- -CREATE TABLE Group2GroupCache -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - - -------------------------------------------------------- --- New Metadata Tables and Sequences -------------------------------------------------------- -CREATE SEQUENCE metadataschemaregistry_seq; -CREATE SEQUENCE metadatafieldregistry_seq; -CREATE SEQUENCE metadatavalue_seq; - --- MetadataSchemaRegistry table -CREATE TABLE MetadataSchemaRegistry -( - metadata_schema_id INTEGER PRIMARY KEY, - namespace VARCHAR(256) UNIQUE, - short_id VARCHAR(32) -); - --- MetadataFieldRegistry table -CREATE TABLE MetadataFieldRegistry -( - metadata_field_id INTEGER PRIMARY KEY, - metadata_schema_id INTEGER NOT NULL REFERENCES MetadataSchemaRegistry(metadata_schema_id), - element VARCHAR(64), - qualifier VARCHAR(64), - scope_note VARCHAR2(2000) -); - --- MetadataValue table -CREATE TABLE MetadataValue -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value VARCHAR2(2000), - text_lang VARCHAR(24), - place INTEGER -); - --- Create the DC schema -INSERT INTO MetadataSchemaRegistry VALUES (1,'http://dublincore.org/documents/dcmi-terms/','dc'); - --- Migrate the existing DCTypes into the new metadata field registry -INSERT INTO MetadataFieldRegistry - (metadata_schema_id, metadata_field_id, element, qualifier, scope_note) - SELECT '1' AS metadata_schema_id, dc_type_id, element, - qualifier, scope_note FROM dctyperegistry; - --- Copy the DCValues into the new MetadataValue table -INSERT INTO MetadataValue (item_id, metadata_field_id, text_value, text_lang, place) - SELECT item_id, dc_type_id, text_value, text_lang, place FROM dcvalue; - -DROP TABLE dcvalue; -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - - --- After copying data from dctypregistry to metadataschemaregistry, we need to reset our sequences --- Update metadatafieldregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_field_id) INTO curr FROM metadatafieldregistry; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatafieldregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatafieldregistry_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadatavalue_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_value_id) INTO curr FROM metadatavalue; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatavalue_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatavalue_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadataschemaregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_schema_id) INTO curr FROM metadataschemaregistry; - - curr := curr + 1; - - 
EXECUTE IMMEDIATE 'DROP SEQUENCE metadataschemaregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadataschemaregistry_seq START WITH ' || NVL(curr,1); -END; -/ - --- Drop the old dctyperegistry -DROP TABLE dctyperegistry; - --- create indexes for the metadata tables -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); -CREATE INDEX metadatafield_schema_idx ON MetadataFieldRegistry(metadata_schema_id); - - -------------------------------------------------------- --- Create the checksum checker tables -------------------------------------------------------- --- list of the possible results as determined --- by the system or an administrator - -CREATE TABLE checksum_results -( - result_code VARCHAR(64) PRIMARY KEY, - result_description VARCHAR2(2000) -); - - --- This table has a one-to-one relationship --- with the bitstream table. A row will be inserted --- every time a row is inserted into the bitstream table, and --- that row will be updated every time the checksum is --- re-calculated. - -CREATE TABLE most_recent_checksum -( - bitstream_id INTEGER PRIMARY KEY, - to_be_processed NUMBER(1) NOT NULL, - expected_checksum VARCHAR(64) NOT NULL, - current_checksum VARCHAR(64) NOT NULL, - last_process_start_date TIMESTAMP NOT NULL, - last_process_end_date TIMESTAMP NOT NULL, - checksum_algorithm VARCHAR(64) NOT NULL, - matched_prev_checksum NUMBER(1) NOT NULL, - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - - --- A row will be inserted into this table every --- time a checksum is re-calculated. - -CREATE SEQUENCE checksum_history_seq; - -CREATE TABLE checksum_history -( - check_id INTEGER PRIMARY KEY, - bitstream_id INTEGER, - process_start_date TIMESTAMP, - process_end_date TIMESTAMP, - checksum_expected VARCHAR(64), - checksum_calculated VARCHAR(64), - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - --- this will insert into the result code --- the initial results - -insert into checksum_results -values -( - 'INVALID_HISTORY', - 'Install of the cheksum checking code do not consider this history as valid' -); - -insert into checksum_results -values -( - 'BITSTREAM_NOT_FOUND', - 'The bitstream could not be found' -); - -insert into checksum_results -values -( - 'CHECKSUM_MATCH', - 'Current checksum matched previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_NO_MATCH', - 'Current checksum does not match previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_PREV_NOT_FOUND', - 'Previous checksum was not found: no comparison possible' -); - -insert into checksum_results -values -( - 'BITSTREAM_INFO_NOT_FOUND', - 'Bitstream info not found' -); - -insert into checksum_results -values -( - 'CHECKSUM_ALGORITHM_INVALID', - 'Invalid checksum algorithm' -); -insert into checksum_results -values -( - 'BITSTREAM_NOT_PROCESSED', - 'Bitstream marked to_be_processed=false' -); -insert into checksum_results -values -( - 'BITSTREAM_MARKED_DELETED', - 'Bitstream marked deleted in bitstream table' -); - --- this will insert into the most recent checksum --- on install all existing bitstreams --- setting all bitstreams already set as --- deleted to not be processed - -insert into most_recent_checksum -( - bitstream_id, - to_be_processed, - expected_checksum, - current_checksum, - last_process_start_date, - last_process_end_date, - checksum_algorithm, - matched_prev_checksum -) -select - bitstream.bitstream_id, - '1', - CASE WHEN 
bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END, - '1' -from bitstream; - --- Update all the deleted checksums --- to not be checked --- because they have since been --- deleted from the system - -update most_recent_checksum -set to_be_processed = 0 -where most_recent_checksum.bitstream_id in ( -select bitstream_id -from bitstream where deleted = '1' ); - --- this will insert into history table --- for the initial start --- we want to tell the users to disregard the initial --- inserts into the checksum history table - -insert into checksum_history -( - bitstream_id, - process_start_date, - process_end_date, - checksum_expected, - checksum_calculated -) -select most_recent_checksum.bitstream_id, - most_recent_checksum.last_process_end_date, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - most_recent_checksum.expected_checksum, - most_recent_checksum.expected_checksum -FROM most_recent_checksum; - --- update the history to indicate that this was --- the first time the software was installed -update checksum_history -set result = 'INVALID_HISTORY'; - - -------------------------------------------------------- --- Table and views for 'browse by subject' functionality -------------------------------------------------------- -CREATE SEQUENCE itemsbysubject_seq; - -------------------------------------------------------- --- ItemsBySubject table -------------------------------------------------------- -CREATE TABLE ItemsBySubject -( - items_by_subject_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - subject VARCHAR2(2000), - sort_subject VARCHAR2(2000) -); - --- index by sort_subject -CREATE INDEX sort_subject_idx on ItemsBySubject(sort_subject); - -------------------------------------------------------- --- CollectionItemsBySubject view -------------------------------------------------------- -CREATE VIEW CollectionItemsBySubject as -SELECT Collection2Item.collection_id, ItemsBySubject.* -FROM ItemsBySubject, Collection2Item -WHERE ItemsBySubject.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsBySubject view -------------------------------------------------------- -CREATE VIEW CommunityItemsBySubject as -SELECT Communities2Item.community_id, ItemsBySubject.* -FROM ItemsBySubject, Communities2Item -WHERE ItemsBySubject.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql deleted file mode 100644 index bb217bd0d18d..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql +++ /dev/null @@ -1,142 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- 
WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - --- Remove NOT NULL restrictions from the checksum columns of most_recent_checksum -ALTER TABLE most_recent_checksum MODIFY expected_checksum null; -ALTER TABLE most_recent_checksum MODIFY current_checksum null; - ------------------------------------------------------- --- New Column language language in EPerson ------------------------------------------------------- - -alter table eperson ADD language VARCHAR2(64); -update eperson set language = 'en'; - --- totally unused column -alter table bundle drop column mets_bitstream_id; - -------------------------------------------------------------------------------- --- Necessary for Configurable Submission functionality: --- Modification to workspaceitem table to support keeping track --- of the last page reached within a step in the Configurable Submission Process -------------------------------------------------------------------------------- -ALTER TABLE workspaceitem ADD page_reached INTEGER; - - -------------------------------------------------------------------------- --- Increase the mimetype field size to support larger types, such as the --- new Word 2007 mimetypes. -------------------------------------------------------------------------- -ALTER TABLE BitstreamFormatRegistry MODIFY (mimetype VARCHAR(256)); - - -------------------------------------------------------------------------- --- Tables to manage cache of item counts for communities and collections -------------------------------------------------------------------------- - -CREATE TABLE collection_item_count ( - collection_id INTEGER PRIMARY KEY REFERENCES collection(collection_id), - count INTEGER -); - -CREATE TABLE community_item_count ( - community_id INTEGER PRIMARY KEY REFERENCES community(community_id), - count INTEGER -); - ------------------------------------------------------------------- --- Remove sequences and tables of the old browse system ------------------------------------------------------------------- - -DROP SEQUENCE itemsbyauthor_seq; -DROP SEQUENCE itemsbytitle_seq; -DROP SEQUENCE itemsbydate_seq; -DROP SEQUENCE itemsbydateaccessioned_seq; -DROP SEQUENCE itemsbysubject_seq; - -DROP TABLE ItemsByAuthor CASCADE CONSTRAINTS; -DROP TABLE ItemsByTitle CASCADE CONSTRAINTS; -DROP TABLE ItemsByDate CASCADE CONSTRAINTS; -DROP TABLE ItemsByDateAccessioned CASCADE CONSTRAINTS; -DROP TABLE ItemsBySubject CASCADE CONSTRAINTS; - -DROP TABLE History CASCADE CONSTRAINTS; -DROP TABLE HistoryState CASCADE CONSTRAINTS; - ----------------------------------------------------------------- --- Add indexes for foreign key columns ----------------------------------------------------------------- - -CREATE INDEX fe_bitstream_fk_idx ON FileExtension(bitstream_format_id); - -CREATE INDEX bit_bitstream_fk_idx ON Bitstream(bitstream_format_id); - -CREATE INDEX g2g_parent_fk_idx ON Group2Group(parent_id); -CREATE INDEX g2g_child_fk_idx ON Group2Group(child_id); - --- CREATE INDEX g2gc_parent_fk_idx ON Group2Group(parent_id); --- CREATE INDEX g2gc_child_fk_idx ON Group2Group(child_id); - -CREATE INDEX item_submitter_fk_idx ON Item(submitter_id); - -CREATE INDEX bundle_primary_fk_idx ON Bundle(primary_bitstream_id); - -CREATE INDEX item2bundle_bundle_fk_idx ON Item2Bundle(bundle_id); - 
-CREATE INDEX bundle2bits_bitstream_fk_idx ON Bundle2Bitstream(bitstream_id); - -CREATE INDEX metadatavalue_field_fk_idx ON MetadataValue(metadata_field_id); - -CREATE INDEX community_logo_fk_idx ON Community(logo_bitstream_id); - -CREATE INDEX collection_logo_fk_idx ON Collection(logo_bitstream_id); -CREATE INDEX collection_template_fk_idx ON Collection(template_item_id); -CREATE INDEX collection_workflow1_fk_idx ON Collection(workflow_step_1); -CREATE INDEX collection_workflow2_fk_idx ON Collection(workflow_step_2); -CREATE INDEX collection_workflow3_fk_idx ON Collection(workflow_step_3); -CREATE INDEX collection_submitter_fk_idx ON Collection(submitter); -CREATE INDEX collection_admin_fk_idx ON Collection(admin); - -CREATE INDEX com2com_parent_fk_idx ON Community2Community(parent_comm_id); -CREATE INDEX com2com_child_fk_idx ON Community2Community(child_comm_id); - -CREATE INDEX rp_eperson_fk_idx ON ResourcePolicy(eperson_id); -CREATE INDEX rp_epersongroup_fk_idx ON ResourcePolicy(epersongroup_id); - -CREATE INDEX epg2ep_eperson_fk_idx ON EPersonGroup2EPerson(eperson_id); - -CREATE INDEX workspace_item_fk_idx ON WorkspaceItem(item_id); -CREATE INDEX workspace_coll_fk_idx ON WorkspaceItem(collection_id); - --- CREATE INDEX workflow_item_fk_idx ON WorkflowItem(item_id); -CREATE INDEX workflow_coll_fk_idx ON WorkflowItem(collection_id); -CREATE INDEX workflow_owner_fk_idx ON WorkflowItem(owner); - -CREATE INDEX tasklist_eperson_fk_idx ON TasklistItem(eperson_id); -CREATE INDEX tasklist_workflow_fk_idx ON TasklistItem(workflow_id); - -CREATE INDEX subs_eperson_fk_idx ON Subscription(eperson_id); -CREATE INDEX subs_collection_fk_idx ON Subscription(collection_id); - -CREATE INDEX epg2wi_group_fk_idx ON epersongroup2workspaceitem(eperson_group_id); -CREATE INDEX epg2wi_workspace_fk_idx ON epersongroup2workspaceitem(workspace_item_id); - -CREATE INDEX Comm2Item_community_fk_idx ON Communities2Item( community_id ); - -CREATE INDEX mrc_result_fk_idx ON most_recent_checksum( result ); - -CREATE INDEX ch_result_fk_idx ON checksum_history( result ); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql deleted file mode 100644 index 659ca32983cc..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql +++ /dev/null @@ -1,93 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------ --- New Column for Community Admin - Delegated Admin patch (DS-228) ------------------------------------------------------------------ -ALTER TABLE community ADD admin INTEGER REFERENCES epersongroup ( eperson_group_id ); -CREATE INDEX community_admin_fk_idx ON Community(admin); - -------------------------------------------------------------------------- --- DS-236 schema changes for Authority Control of Metadata Values -------------------------------------------------------------------------- -ALTER TABLE MetadataValue - ADD ( authority VARCHAR(100), - confidence INTEGER DEFAULT -1); - --------------------------------------------------------------------------- --- DS-295 CC License being assigned incorrect Mime Type during submission. --------------------------------------------------------------------------- -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'CC License') - WHERE name = 'license_text' AND source = 'org.dspace.license.CreativeCommons'; - -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'RDF XML') - WHERE name = 'license_rdf' AND source = 'org.dspace.license.CreativeCommons'; - -------------------------------------------------------------------------- --- DS-260 Cleanup of Owning collection column for template item created --- with the JSPUI after the collection creation -------------------------------------------------------------------------- -UPDATE item SET owning_collection = null WHERE item_id IN - (SELECT template_item_id FROM collection WHERE template_item_id IS NOT null); - --- Recreate constraints with a known name and deferrable option! 
--- (The previous version of these constraints is dropped by org.dspace.storage.rdbms.migration.V1_5_9__Drop_constraint_for_DSpace_1_6_schema) -ALTER TABLE community2collection ADD CONSTRAINT comm2coll_collection_fk FOREIGN KEY (collection_id) REFERENCES collection DEFERRABLE; -ALTER TABLE community2community ADD CONSTRAINT com2com_child_fk FOREIGN KEY (child_comm_id) REFERENCES community DEFERRABLE; -ALTER TABLE collection2item ADD CONSTRAINT coll2item_item_fk FOREIGN KEY (item_id) REFERENCES item DEFERRABLE; - - ------------------------------------------------------------------- --- New tables /sequences for the harvester functionality (DS-289) ------------------------------------------------------------------- -CREATE SEQUENCE harvested_collection_seq; -CREATE SEQUENCE harvested_item_seq; - -------------------------------------------------------- --- Create the harvest settings table -------------------------------------------------------- --- Values used by the OAIHarvester to harvest a collection --- HarvestInstance is the DAO class for this table - -CREATE TABLE harvested_collection -( - collection_id INTEGER REFERENCES collection(collection_id) ON DELETE CASCADE, - harvest_type INTEGER, - oai_source VARCHAR(256), - oai_set_id VARCHAR(256), - harvest_message VARCHAR2(512), - metadata_config_id VARCHAR(256), - harvest_status INTEGER, - harvest_start_time TIMESTAMP, - last_harvested TIMESTAMP, - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_collection_fk_idx ON harvested_collection(collection_id); - - -CREATE TABLE harvested_item -( - item_id INTEGER REFERENCES item(item_id) ON DELETE CASCADE, - last_harvested TIMESTAMP, - oai_id VARCHAR(64), - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_item_fk_idx ON harvested_item(item_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql deleted file mode 100644 index f4b2737fb3a8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------- --- Remove unused / obsolete sequence 'dctyperegistry_seq' (DS-729) ------------------------------------------------------------------- -DROP SEQUENCE dctyperegistry_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql deleted file mode 100644 index f96cddbe7fd4..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- New column for bitstream order DS-749 -- -------------------------------------------- -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; - ---Place the sequence id's in the order -UPDATE bundle2bitstream SET bitstream_order=(SELECT sequence_id FROM bitstream WHERE bitstream.bitstream_id=bundle2bitstream.bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql deleted file mode 100644 index 472dc7dc5279..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql +++ /dev/null @@ -1,52 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -ALTER TABLE resourcepolicy - ADD ( - rpname VARCHAR2(30), - rptype VARCHAR2(30), - rpdescription VARCHAR2(100) - ); - - -ALTER TABLE item ADD discoverable NUMBER(1); - -CREATE TABLE versionhistory -( - versionhistory_id INTEGER NOT NULL PRIMARY KEY -); - -CREATE TABLE versionitem -( - versionitem_id INTEGER NOT NULL PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - version_number INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - version_date TIMESTAMP, - version_summary VARCHAR2(255), - versionhistory_id INTEGER REFERENCES VersionHistory(versionhistory_id) -); - -CREATE SEQUENCE versionitem_seq; -CREATE SEQUENCE versionhistory_seq; - - -------------------------------------------- --- New columns and longer hash for salted password hashing DS-861 -- -------------------------------------------- -ALTER TABLE EPerson modify( password VARCHAR(128)); -ALTER TABLE EPerson ADD salt VARCHAR(32); -ALTER TABLE EPerson ADD digest_algorithm VARCHAR(16); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql deleted file mode 100644 index 8102376906a3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql +++ /dev/null @@ -1,88 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- Ensure that discoverable has a sensible default -------------------------------------------- -update item set discoverable=1 WHERE discoverable IS NULL; - -------------------------------------------- --- Add support for DOIs (table and seq.) 
-- -------------------------------------------- - -CREATE TABLE Doi -( - doi_id INTEGER PRIMARY KEY, - doi VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER, - status INTEGER -); - -CREATE SEQUENCE doi_seq; - --- index by resource id and resource type id -CREATE INDEX doi_resource_id_type_idx ON doi(resource_id, resource_type_id); - -------------------------------------------- --- Table of running web applications for 'dspace version' -- -------------------------------------------- - -CREATE TABLE Webapp -( - webapp_id INTEGER NOT NULL PRIMARY KEY, - AppName VARCHAR2(32), - URL VARCHAR2(1000), - Started TIMESTAMP, - isUI NUMBER(1) -); - -CREATE SEQUENCE webapp_seq; - -------------------------------------------------------- --- DS-824 RequestItem table -------------------------------------------------------- - -CREATE TABLE requestitem -( - requestitem_id INTEGER NOT NULL, - token varchar(48), - item_id INTEGER, - bitstream_id INTEGER, - allfiles NUMBER(1), - request_email VARCHAR2(64), - request_name VARCHAR2(64), - request_date TIMESTAMP, - accept_request NUMBER(1), - decision_date TIMESTAMP, - expires TIMESTAMP, - CONSTRAINT requestitem_pkey PRIMARY KEY (requestitem_id), - CONSTRAINT requestitem_token_key UNIQUE (token) -); - -CREATE SEQUENCE requestitem_seq; - -------------------------------------------------------- --- DS-1655 Disable "Initial Questions" page in Submission UI by default -------------------------------------------------------- -update workspaceitem set multiple_titles=1, published_before=1, multiple_files=1; -update workflowitem set multiple_titles=1, published_before=1, multiple_files=1; - -------------------------------------------------------- --- DS-1811 Removing a collection fails if non-Solr DAO has been used before for item count -------------------------------------------------------- -delete from collection_item_count; -delete from community_item_count; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql deleted file mode 100644 index 6d75905ec980..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql +++ /dev/null @@ -1,64 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - --- Special case of migration: we need the EPerson schema in order to get our metadata for all queries to work, --- but we cannot use a DB connection until our database is up to date, so we need to create our registries manually in sql - -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/eperson' as namespace, 'eperson' as short_id FROM dual - WHERE NOT EXISTS (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry WHERE namespace = 'http://dspace.org/eperson' AND short_id = 'eperson'); - - --- Insert eperson.firstname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'firstname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'firstname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.lastname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'lastname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'lastname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.phone -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'phone' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'phone' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.language -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'language' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'language' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert into dc.provenance -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'provenance' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'provenance' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); - --- Insert into dc.rights.license -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'rights', 'license' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM 
metadatafieldregistry WHERE element = 'rights' AND qualifier='license' AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql deleted file mode 100644 index c86cfe31223e..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1945 RequestItem Helpdesk, store request message ------------------------------------------------------- -ALTER TABLE requestitem ADD request_message VARCHAR2(2000); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql deleted file mode 100644 index 8f0cd0d5e1d7..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql +++ /dev/null @@ -1,333 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------ --- DS-1582 Metadata on all DSpace Objects --- NOTE: This script also has a complementary Flyway Java Migration --- which drops the "item_id" constraint on metadatavalue --- org.dspace.storage.rdbms.migration.V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint ------------------------------------------------------ -alter table metadatavalue rename column item_id to resource_id; - -alter table metadatavalue MODIFY(resource_id not null); -alter table metadatavalue add resource_type_id integer; -UPDATE metadatavalue SET resource_type_id = 2; -alter table metadatavalue MODIFY(resource_type_id not null); - - - --- --------- --- community --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM community where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where 
short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM community where not name is null; - -alter table community drop (introductory_text, short_description, side_bar_text, copyright_text, name); - - --- ---------- --- collection --- ---------- - - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM collection where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where 
metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id, -provenance_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not provenance_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier = 'license') AS metadata_field_id, -license AS text_value, -null AS text_lang, -0 AS place -FROM collection where not license is null; - -alter table collection drop (introductory_text, short_description, copyright_text, side_bar_text, name, license, provenance_description); - - --- --------- --- bundle --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bundle_id AS resource_id, -1 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bundle where not name is null; - -alter table bundle drop column name; - - - --- --------- --- bitstream --- --------- - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id, -user_format_description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not user_format_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT 
-metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id, -source AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not source is null; - -alter table bitstream drop (name, description, user_format_description, source); - - --- --------- --- epersongroup --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_group_id AS resource_id, -6 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM epersongroup where not name is null; - -alter table epersongroup drop column name; - - - --- --------- --- eperson --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id, -firstname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not firstname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id, -lastname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not lastname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id, -phone AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not phone is null; - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id, -language AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not language is null; - -alter table eperson drop (firstname, lastname, phone, language); - --- 
--------- --- dcvalue view --- --------- - -drop view dcvalue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.resource_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1 AND MetadataValue.resource_type_id = 2; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql deleted file mode 100644 index 2e09b807de3b..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 0 and resource_id in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql deleted file mode 100644 index 9f9836faf471..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3563 Missing database index on metadatavalue.resource_type_id ------------------------------------------------------- --- Create an index on the metadata value resource_type_id column so that it can be searched efficiently. 
-declare - index_not_exists EXCEPTION; - PRAGMA EXCEPTION_INIT(index_not_exists, -1418); -begin - - execute immediate 'DROP INDEX metadatavalue_type_id_idx'; - exception - when index_not_exists then null; -end; -/ -CREATE INDEX metadatavalue_type_id_idx ON metadatavalue (resource_type_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql deleted file mode 100644 index dd857e763df0..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql +++ /dev/null @@ -1,469 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -DROP VIEW community2item; - -CREATE TABLE dspaceobject -( - uuid RAW(16) NOT NULL PRIMARY KEY -); - -CREATE TABLE site -( - uuid RAW(16) NOT NULL PRIMARY KEY REFERENCES dspaceobject(uuid) -); - -ALTER TABLE eperson ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM eperson; -ALTER TABLE eperson ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE eperson MODIFY uuid NOT NULL; -ALTER TABLE eperson ADD CONSTRAINT eperson_id_unique PRIMARY KEY (uuid); -UPDATE eperson SET require_certificate = '0' WHERE require_certificate IS NULL; -UPDATE eperson SET self_registered = '0' WHERE self_registered IS NULL; - - - -UPDATE metadatavalue SET text_value='Administrator' - WHERE resource_type_id=6 AND resource_id=1; -UPDATE metadatavalue SET text_value='Anonymous' - WHERE resource_type_id=6 AND resource_id=0; - -ALTER TABLE epersongroup ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM epersongroup; -ALTER TABLE epersongroup ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE epersongroup MODIFY uuid NOT NULL; -ALTER TABLE epersongroup ADD CONSTRAINT epersongroup_id_unique PRIMARY KEY (uuid); - -ALTER TABLE item ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM item; -ALTER TABLE item ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE item MODIFY uuid NOT NULL; -ALTER TABLE item ADD CONSTRAINT item_id_unique PRIMARY KEY (uuid); - -ALTER TABLE community ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM community; -ALTER TABLE community ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE community MODIFY uuid NOT NULL; -ALTER TABLE community ADD CONSTRAINT community_id_unique PRIMARY KEY (uuid); - - -ALTER TABLE collection ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM collection; -ALTER TABLE collection ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE collection MODIFY uuid NOT NULL; -ALTER TABLE collection ADD CONSTRAINT collection_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bundle ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bundle; -ALTER TABLE bundle ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bundle MODIFY uuid NOT NULL; -ALTER TABLE bundle 
ADD CONSTRAINT bundle_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bitstream ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bitstream; -ALTER TABLE bitstream ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bitstream MODIFY uuid NOT NULL; -ALTER TABLE bitstream ADD CONSTRAINT bitstream_id_unique PRIMARY KEY (uuid); -UPDATE bitstream SET sequence_id = -1 WHERE sequence_id IS NULL; -UPDATE bitstream SET size_bytes = -1 WHERE size_bytes IS NULL; -UPDATE bitstream SET deleted = '0' WHERE deleted IS NULL; -UPDATE bitstream SET store_number = -1 WHERE store_number IS NULL; - --- Migrate EPersonGroup2EPerson table -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE EPersonGroup2EPerson ADD eperson_group_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE EPersonGroup2EPerson ADD eperson_id RAW(16) REFERENCES Eperson(uuid); -CREATE INDEX EpersonGroup2Eperson_group on EpersonGroup2Eperson(eperson_group_id); -CREATE INDEX EpersonGroup2Eperson_person on EpersonGroup2Eperson(eperson_id); -UPDATE EPersonGroup2EPerson SET eperson_group_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE EPersonGroup2EPerson.eperson_group_legacy_id = EPersonGroup.eperson_group_id); -UPDATE EPersonGroup2EPerson SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE EPersonGroup2EPerson.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_group_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_legacy_id; -ALTER TABLE epersongroup2eperson DROP COLUMN id; -ALTER TABLE EPersonGroup2EPerson add CONSTRAINT EPersonGroup2EPerson_unique primary key (eperson_group_id,eperson_id); - --- Migrate GROUP2GROUP table -ALTER TABLE Group2Group RENAME COLUMN parent_id to parent_legacy_id; -ALTER TABLE Group2Group RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2Group ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2Group ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2Group_parent on Group2Group(parent_id); -CREATE INDEX Group2Group_child on Group2Group(child_id); -UPDATE Group2Group SET parent_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE Group2Group.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2Group SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2Group.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2Group MODIFY parent_id NOT NULL; -ALTER TABLE Group2Group MODIFY child_id NOT NULL; -ALTER TABLE Group2Group DROP COLUMN parent_legacy_id; -ALTER TABLE Group2Group DROP COLUMN child_legacy_id; -ALTER TABLE Group2Group DROP COLUMN id; -ALTER TABLE Group2Group add CONSTRAINT Group2Group_unique primary key (parent_id,child_id); - --- Migrate collection2item -ALTER TABLE Collection2Item RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Collection2Item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE Collection2Item ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE Collection2Item ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX Collecion2Item_collection on Collection2Item(collection_id); -CREATE INDEX Collecion2Item_item on Collection2Item(item_id); -UPDATE Collection2Item SET collection_id = 
(SELECT Collection.uuid FROM Collection WHERE Collection2Item.collection_legacy_id = Collection.collection_id); -UPDATE Collection2Item SET item_id = (SELECT Item.uuid FROM Item WHERE Collection2Item.item_legacy_id = Item.item_id); -ALTER TABLE Collection2Item MODIFY collection_id NOT NULL; -ALTER TABLE Collection2Item MODIFY item_id NOT NULL; -ALTER TABLE Collection2Item DROP COLUMN collection_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN item_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN id; --- Magic query that will delete all duplicate collection item_id references from the database (if we don't do this the primary key creation will fail) -DELETE FROM collection2item WHERE rowid NOT IN (SELECT MIN(rowid) FROM collection2item GROUP BY collection_id,item_id); -ALTER TABLE Collection2Item add CONSTRAINT collection2item_unique primary key (collection_id,item_id); - --- Migrate Community2Community -ALTER TABLE Community2Community RENAME COLUMN parent_comm_id to parent_legacy_id; -ALTER TABLE Community2Community RENAME COLUMN child_comm_id to child_legacy_id; -ALTER TABLE Community2Community ADD parent_comm_id RAW(16) REFERENCES Community(uuid); -ALTER TABLE Community2Community ADD child_comm_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX Community2Community_parent on Community2Community(parent_comm_id); -CREATE INDEX Community2Community_child on Community2Community(child_comm_id); -UPDATE Community2Community SET parent_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.parent_legacy_id = Community.community_id); -UPDATE Community2Community SET child_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.child_legacy_id = Community.community_id); -ALTER TABLE Community2Community MODIFY parent_comm_id NOT NULL; -ALTER TABLE Community2Community MODIFY child_comm_id NOT NULL; -ALTER TABLE Community2Community DROP COLUMN parent_legacy_id; -ALTER TABLE Community2Community DROP COLUMN child_legacy_id; -ALTER TABLE Community2Community DROP COLUMN id; -ALTER TABLE Community2Community add CONSTRAINT Community2Community_unique primary key (parent_comm_id,child_comm_id); - --- Migrate community2collection -ALTER TABLE community2collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE community2collection RENAME COLUMN community_id to community_legacy_id; -ALTER TABLE community2collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE community2collection ADD community_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX community2collection_collectio on community2collection(collection_id); -CREATE INDEX community2collection_community on community2collection(community_id); -UPDATE community2collection SET collection_id = (SELECT Collection.uuid FROM Collection WHERE community2collection.collection_legacy_id = Collection.collection_id); -UPDATE community2collection SET community_id = (SELECT Community.uuid FROM Community WHERE community2collection.community_legacy_id = Community.community_id); -ALTER TABLE community2collection MODIFY collection_id NOT NULL; -ALTER TABLE community2collection MODIFY community_id NOT NULL; -ALTER TABLE community2collection DROP COLUMN collection_legacy_id; -ALTER TABLE community2collection DROP COLUMN community_legacy_id; -ALTER TABLE community2collection DROP COLUMN id; -ALTER TABLE community2collection add CONSTRAINT community2collection_unique primary key (collection_id,community_id); - - --- Migrate Group2GroupCache table -ALTER TABLE Group2GroupCache RENAME COLUMN 
parent_id to parent_legacy_id; -ALTER TABLE Group2GroupCache RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2GroupCache ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2GroupCache ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2GroupCache_parent on Group2GroupCache(parent_id); -CREATE INDEX Group2GroupCache_child on Group2GroupCache(child_id); -UPDATE Group2GroupCache SET parent_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2GroupCache SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2GroupCache MODIFY parent_id NOT NULL; -ALTER TABLE Group2GroupCache MODIFY child_id NOT NULL; -ALTER TABLE Group2GroupCache DROP COLUMN parent_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN child_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN id; -ALTER TABLE Group2GroupCache add CONSTRAINT Group2GroupCache_unique primary key (parent_id,child_id); - --- Migrate Item2Bundle -ALTER TABLE item2bundle RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE item2bundle RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE item2bundle ADD bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE item2bundle ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX item2bundle_bundle on item2bundle(bundle_id); -CREATE INDEX item2bundle_item on item2bundle(item_id); -UPDATE item2bundle SET bundle_id = (SELECT Bundle.uuid FROM Bundle WHERE item2bundle.bundle_legacy_id = Bundle.bundle_id); -UPDATE item2bundle SET item_id = (SELECT Item.uuid FROM Item WHERE item2bundle.item_legacy_id = Item.item_id); -ALTER TABLE item2bundle MODIFY bundle_id NOT NULL; -ALTER TABLE item2bundle MODIFY item_id NOT NULL; -ALTER TABLE item2bundle DROP COLUMN bundle_legacy_id; -ALTER TABLE item2bundle DROP COLUMN item_legacy_id; -ALTER TABLE item2bundle DROP COLUMN id; -ALTER TABLE item2bundle add CONSTRAINT item2bundle_unique primary key (bundle_id,item_id); - ---Migrate Bundle2Bitstream -ALTER TABLE bundle2bitstream RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE bundle2bitstream ADD bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE bundle2bitstream ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle2bitstream_bundle on bundle2bitstream(bundle_id); -CREATE INDEX bundle2bitstream_bitstream on bundle2bitstream(bitstream_id); -UPDATE bundle2bitstream SET bundle_id = (SELECT bundle.uuid FROM bundle WHERE bundle2bitstream.bundle_legacy_id = bundle.bundle_id); -UPDATE bundle2bitstream SET bitstream_id = (SELECT bitstream.uuid FROM bitstream WHERE bundle2bitstream.bitstream_legacy_id = bitstream.bitstream_id); -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_order to bitstream_order_legacy; -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; -MERGE INTO bundle2bitstream dst -USING ( SELECT ROWID AS r_id - , ROW_NUMBER () OVER ( PARTITION BY bundle_id - ORDER BY bitstream_order_legacy, bitstream_id - ) AS new_order - FROM bundle2bitstream - ) src -ON (dst.ROWID = src.r_id) -WHEN MATCHED THEN UPDATE -SET dst.bitstream_order = (src.new_order-1) -; -ALTER TABLE bundle2bitstream MODIFY bundle_id NOT NULL; -ALTER TABLE bundle2bitstream MODIFY bitstream_id NOT NULL; -ALTER TABLE bundle2bitstream DROP COLUMN bundle_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN 
bitstream_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN id; -ALTER TABLE bundle2bitstream add CONSTRAINT bundle2bitstream_unique primary key (bitstream_id,bundle_id,bitstream_order); - - --- Migrate item -ALTER TABLE item RENAME COLUMN submitter_id to submitter_id_legacy_id; -ALTER TABLE item ADD submitter_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX item_submitter on item(submitter_id); -UPDATE item SET submitter_id = (SELECT eperson.uuid FROM eperson WHERE item.submitter_id_legacy_id = eperson.eperson_id); -ALTER TABLE item DROP COLUMN submitter_id_legacy_id; - -ALTER TABLE item RENAME COLUMN owning_collection to owning_collection_legacy; -ALTER TABLE item ADD owning_collection RAW(16) REFERENCES Collection(uuid); -CREATE INDEX item_collection on item(owning_collection); -UPDATE item SET owning_collection = (SELECT Collection.uuid FROM Collection WHERE item.owning_collection_legacy = collection.collection_id); -ALTER TABLE item DROP COLUMN owning_collection_legacy; - -UPDATE item SET in_archive = '0' WHERE in_archive IS NULL; -UPDATE item SET discoverable = '0' WHERE discoverable IS NULL; -UPDATE item SET withdrawn = '0' WHERE withdrawn IS NULL; - --- Migrate bundle -ALTER TABLE bundle RENAME COLUMN primary_bitstream_id to primary_bitstream_legacy_id; -ALTER TABLE bundle ADD primary_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle_primary on bundle(primary_bitstream_id); -UPDATE bundle SET primary_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE bundle.primary_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE bundle DROP COLUMN primary_bitstream_legacy_id; - - --- Migrate community references -ALTER TABLE Community RENAME COLUMN admin to admin_legacy; -ALTER TABLE Community ADD admin RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX Community_admin on Community(admin); -UPDATE Community SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Community.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Community DROP COLUMN admin_legacy; - -ALTER TABLE Community RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Community ADD logo_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX Community_bitstream on Community(logo_bitstream_id); -UPDATE Community SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Community.logo_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE Community DROP COLUMN logo_bitstream_legacy_id; - - ---Migrate Collection references -ALTER TABLE Collection RENAME COLUMN workflow_step_1 to workflow_step_1_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_2 to workflow_step_2_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_3 to workflow_step_3_legacy; -ALTER TABLE Collection RENAME COLUMN submitter to submitter_legacy; -ALTER TABLE Collection RENAME COLUMN template_item_id to template_item_legacy_id; -ALTER TABLE Collection RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Collection RENAME COLUMN admin to admin_legacy; -ALTER TABLE Collection ADD workflow_step_1 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_2 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_3 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD submitter RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD template_item_id RAW(16); -ALTER TABLE Collection ADD logo_bitstream_id RAW(16); -ALTER TABLE Collection ADD admin RAW(16) REFERENCES 
EPersonGroup(uuid); -CREATE INDEX Collection_workflow1 on Collection(workflow_step_1); -CREATE INDEX Collection_workflow2 on Collection(workflow_step_2); -CREATE INDEX Collection_workflow3 on Collection(workflow_step_3); -CREATE INDEX Collection_submitter on Collection(submitter); -CREATE INDEX Collection_template on Collection(template_item_id); -CREATE INDEX Collection_bitstream on Collection(logo_bitstream_id); -UPDATE Collection SET workflow_step_1 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_1_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_2 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_2_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_3 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_3_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET submitter = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.submitter_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET template_item_id = (SELECT Item.uuid FROM Item WHERE Collection.template_item_legacy_id = Item.item_id); -UPDATE Collection SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Collection.logo_bitstream_legacy_id = Bitstream.bitstream_id); -UPDATE Collection SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Collection DROP COLUMN workflow_step_1_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_2_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_3_legacy; -ALTER TABLE Collection DROP COLUMN submitter_legacy; -ALTER TABLE Collection DROP COLUMN template_item_legacy_id; -ALTER TABLE Collection DROP COLUMN logo_bitstream_legacy_id; -ALTER TABLE Collection DROP COLUMN admin_legacy; - - --- Migrate resource policy references -ALTER TABLE ResourcePolicy RENAME COLUMN eperson_id to eperson_id_legacy_id; -ALTER TABLE ResourcePolicy ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX resourcepolicy_person on resourcepolicy(eperson_id); -UPDATE ResourcePolicy SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.eperson_id_legacy_id = eperson.eperson_id); -ALTER TABLE ResourcePolicy DROP COLUMN eperson_id_legacy_id; - -ALTER TABLE ResourcePolicy RENAME COLUMN epersongroup_id to epersongroup_id_legacy_id; -ALTER TABLE ResourcePolicy ADD epersongroup_id RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX resourcepolicy_group on resourcepolicy(epersongroup_id); -UPDATE ResourcePolicy SET epersongroup_id = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.epersongroup_id_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE ResourcePolicy DROP COLUMN epersongroup_id_legacy_id; - -ALTER TABLE ResourcePolicy ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -UPDATE ResourcePolicy SET dspace_object = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.resource_id = eperson.eperson_id AND ResourcePolicy.resource_type_id = 7) WHERE ResourcePolicy.resource_type_id = 7; -UPDATE ResourcePolicy SET dspace_object = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.resource_id = epersongroup.eperson_group_id AND ResourcePolicy.resource_type_id = 6) WHERE ResourcePolicy.resource_type_id = 6; -UPDATE ResourcePolicy SET dspace_object = (SELECT community.uuid FROM community WHERE ResourcePolicy.resource_id = 
community.community_id AND ResourcePolicy.resource_type_id = 4) WHERE ResourcePolicy.resource_type_id = 4; -UPDATE ResourcePolicy SET dspace_object = (SELECT collection.uuid FROM collection WHERE ResourcePolicy.resource_id = collection.collection_id AND ResourcePolicy.resource_type_id = 3) WHERE ResourcePolicy.resource_type_id = 3; -UPDATE ResourcePolicy SET dspace_object = (SELECT item.uuid FROM item WHERE ResourcePolicy.resource_id = item.item_id AND ResourcePolicy.resource_type_id = 2) WHERE ResourcePolicy.resource_type_id = 2; -UPDATE ResourcePolicy SET dspace_object = (SELECT bundle.uuid FROM bundle WHERE ResourcePolicy.resource_id = bundle.bundle_id AND ResourcePolicy.resource_type_id = 1) WHERE ResourcePolicy.resource_type_id = 1; -UPDATE ResourcePolicy SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE ResourcePolicy.resource_id = bitstream.bitstream_id AND ResourcePolicy.resource_type_id = 0) WHERE ResourcePolicy.resource_type_id = 0; -UPDATE resourcepolicy SET resource_type_id = -1 WHERE resource_type_id IS NULL; -UPDATE resourcepolicy SET action_id = -1 WHERE action_id IS NULL; - - --- Migrate Subscription -ALTER TABLE Subscription RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE Subscription ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX Subscription_person on Subscription(eperson_id); -UPDATE Subscription SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE Subscription.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE Subscription DROP COLUMN eperson_legacy_id; - -ALTER TABLE Subscription RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Subscription ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX Subscription_collection on Subscription(collection_id); -UPDATE Subscription SET collection_id = (SELECT collection.uuid FROM collection WHERE Subscription.collection_legacy_id = collection.collection_id); -ALTER TABLE Subscription DROP COLUMN collection_legacy_id; - - --- Migrate versionitem -ALTER TABLE versionitem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE versionitem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX versionitem_person on versionitem(eperson_id); -UPDATE versionitem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE versionitem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE versionitem DROP COLUMN eperson_legacy_id; - -ALTER TABLE versionitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE versionitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX versionitem_item on versionitem(item_id); -UPDATE versionitem SET item_id = (SELECT item.uuid FROM item WHERE versionitem.item_legacy_id = item.item_id); -ALTER TABLE versionitem DROP COLUMN item_legacy_id; -UPDATE versionitem SET version_number = -1 WHERE version_number IS NULL; - --- Migrate handle table -ALTER TABLE handle RENAME COLUMN resource_id to resource_legacy_id; -ALTER TABLE handle ADD resource_id RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX handle_object on handle(resource_id); -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4); -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3); -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2); - --- Migrate 
metadata value table -DROP VIEW dcvalue; - -ALTER TABLE metadatavalue ADD dspace_object_id RAW(16) REFERENCES dspaceobject(uuid); --- CREATE INDEX metadatavalue_field on metadatavalue(metadata_field_id); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); -UPDATE metadatavalue SET dspace_object_id = (SELECT eperson.uuid FROM eperson WHERE metadatavalue.resource_id = eperson.eperson_id AND metadatavalue.resource_type_id = 7) WHERE metadatavalue.resource_type_id= 7; -UPDATE metadatavalue SET dspace_object_id = (SELECT epersongroup.uuid FROM epersongroup WHERE metadatavalue.resource_id = epersongroup.eperson_group_id AND metadatavalue.resource_type_id = 6) WHERE metadatavalue.resource_type_id= 6; -UPDATE metadatavalue SET dspace_object_id = (SELECT community.uuid FROM community WHERE metadatavalue.resource_id = community.community_id AND metadatavalue.resource_type_id = 4) WHERE metadatavalue.resource_type_id= 4; -UPDATE metadatavalue SET dspace_object_id = (SELECT collection.uuid FROM collection WHERE metadatavalue.resource_id = collection.collection_id AND metadatavalue.resource_type_id = 3) WHERE metadatavalue.resource_type_id= 3; -UPDATE metadatavalue SET dspace_object_id = (SELECT item.uuid FROM item WHERE metadatavalue.resource_id = item.item_id AND metadatavalue.resource_type_id = 2) WHERE metadatavalue.resource_type_id= 2; -UPDATE metadatavalue SET dspace_object_id = (SELECT bundle.uuid FROM bundle WHERE metadatavalue.resource_id = bundle.bundle_id AND metadatavalue.resource_type_id = 1) WHERE metadatavalue.resource_type_id= 1; -UPDATE metadatavalue SET dspace_object_id = (SELECT bitstream.uuid FROM bitstream WHERE metadatavalue.resource_id = bitstream.bitstream_id AND metadatavalue.resource_type_id = 0) WHERE metadatavalue.resource_type_id= 0; -DROP INDEX metadatavalue_item_idx; -DROP INDEX metadatavalue_item_idx2; -ALTER TABLE metadatavalue DROP COLUMN resource_id; -ALTER TABLE metadatavalue DROP COLUMN resource_type_id; -UPDATE MetadataValue SET confidence = -1 WHERE confidence IS NULL; -UPDATE metadatavalue SET place = -1 WHERE place IS NULL; - --- Alter harvested item -ALTER TABLE harvested_item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE harvested_item ADD item_id RAW(16) REFERENCES item(uuid); -CREATE INDEX harvested_item_item on harvested_item(item_id); -UPDATE harvested_item SET item_id = (SELECT item.uuid FROM item WHERE harvested_item.item_legacy_id = item.item_id); -ALTER TABLE harvested_item DROP COLUMN item_legacy_id; - --- Alter harvested collection -ALTER TABLE harvested_collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE harvested_collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX harvested_collection_collectio on harvested_collection(collection_id); -UPDATE harvested_collection SET collection_id = (SELECT collection.uuid FROM collection WHERE harvested_collection.collection_legacy_id = collection.collection_id); -ALTER TABLE harvested_collection DROP COLUMN collection_legacy_id; - -UPDATE harvested_collection SET harvest_type = -1 WHERE harvest_type IS NULL; -UPDATE harvested_collection SET harvest_status = -1 WHERE harvest_status IS NULL; - - ---Alter workspaceitem -ALTER TABLE workspaceitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workspaceitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX workspaceitem_item on workspaceitem(item_id); -UPDATE workspaceitem SET 
item_id = (SELECT item.uuid FROM item WHERE workspaceitem.item_legacy_id = item.item_id); -ALTER TABLE workspaceitem DROP COLUMN item_legacy_id; - -ALTER TABLE workspaceitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workspaceitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX workspaceitem_coll on workspaceitem(collection_id); -UPDATE workspaceitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workspaceitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workspaceitem DROP COLUMN collection_legacy_id; - -UPDATE workspaceitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workspaceitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workspaceitem SET multiple_files = '0' WHERE multiple_files IS NULL; -UPDATE workspaceitem SET stage_reached = -1 WHERE stage_reached IS NULL; -UPDATE workspaceitem SET page_reached = -1 WHERE page_reached IS NULL; - ---Alter epersongroup2workspaceitem -ALTER TABLE epersongroup2workspaceitem RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE epersongroup2workspaceitem ADD eperson_group_id RAW(16) REFERENCES epersongroup(uuid); -CREATE INDEX epersongroup2workspaceitem_gro on epersongroup2workspaceitem(eperson_group_id); -UPDATE epersongroup2workspaceitem SET eperson_group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE epersongroup2workspaceitem.eperson_group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE epersongroup2workspaceitem DROP COLUMN eperson_group_legacy_id; - -ALTER TABLE epersongroup2workspaceitem DROP COLUMN id; -ALTER TABLE epersongroup2workspaceitem MODIFY workspace_item_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem MODIFY eperson_group_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem add CONSTRAINT epersongroup2wsitem_unqiue primary key (workspace_item_id,eperson_group_id); - ---Alter most_recent_checksum -ALTER TABLE most_recent_checksum RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE most_recent_checksum ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX most_recent_checksum_bitstream on most_recent_checksum(bitstream_id); -UPDATE most_recent_checksum SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE most_recent_checksum.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE most_recent_checksum DROP COLUMN bitstream_legacy_id; - -UPDATE most_recent_checksum SET to_be_processed = '0' WHERE to_be_processed IS NULL; -UPDATE most_recent_checksum SET matched_prev_checksum = '0' WHERE matched_prev_checksum IS NULL; - ---Alter checksum_history -ALTER TABLE checksum_history RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE checksum_history ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX checksum_history_bitstream on checksum_history(bitstream_id); -UPDATE checksum_history SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE checksum_history.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE checksum_history DROP COLUMN bitstream_legacy_id; - -RENAME checksum_history_seq TO checksum_history_check_id_seq; - ---Alter table doi -ALTER TABLE doi ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX doi_object on doi(dspace_object); -UPDATE doi SET dspace_object = (SELECT community.uuid FROM community WHERE doi.resource_id = community.community_id AND doi.resource_type_id = 4) WHERE doi.resource_type_id = 4; -UPDATE doi SET dspace_object = (SELECT collection.uuid FROM 
collection WHERE doi.resource_id = collection.collection_id AND doi.resource_type_id = 3) WHERE doi.resource_type_id = 3; -UPDATE doi SET dspace_object = (SELECT item.uuid FROM item WHERE doi.resource_id = item.item_id AND doi.resource_type_id = 2) WHERE doi.resource_type_id = 2; -UPDATE doi SET dspace_object = (SELECT bundle.uuid FROM bundle WHERE doi.resource_id = bundle.bundle_id AND doi.resource_type_id = 1) WHERE doi.resource_type_id = 1; -UPDATE doi SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE doi.resource_id = bitstream.bitstream_id AND doi.resource_type_id = 0) WHERE doi.resource_type_id = 0; - ---Update table bitstreamformatregistry -UPDATE bitstreamformatregistry SET support_level = -1 WHERE support_level IS NULL; - ---Update table requestitem -UPDATE requestitem SET allfiles = '0' WHERE allfiles IS NULL; -UPDATE requestitem SET accept_request = '0' WHERE accept_request IS NULL; - ---Update table webapp -UPDATE webapp SET isui = -1 WHERE isui IS NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql deleted file mode 100644 index 8f1a7ad157a2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS_3378 Lost oracle indexes ------------------------------------------------------- -CREATE UNIQUE INDEX eperson_eperson on eperson(eperson_id); -CREATE UNIQUE INDEX epersongroup_eperson_group on epersongroup(eperson_group_id); -CREATE UNIQUE INDEX community_community on community(community_id); -CREATE UNIQUE INDEX collection_collection on collection(collection_id); -CREATE UNIQUE INDEX item_item on item(item_id); -CREATE UNIQUE INDEX bundle_bundle on bundle(bundle_id); -CREATE UNIQUE INDEX bitstream_bitstream on bitstream(bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql deleted file mode 100644 index 8ad6f7fcd247..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3024 Invent "permanent" groups ------------------------------------------------------- - -ALTER TABLE epersongroup - ADD (permanent NUMBER(1) DEFAULT 0); -UPDATE epersongroup SET permanent = 1 - WHERE uuid IN ( - SELECT dspace_object_id - FROM metadataschemaregistry s - JOIN metadatafieldregistry f USING (metadata_schema_id) - JOIN metadatavalue v USING (metadata_field_id) - WHERE s.short_id = 'dc' - AND f.element = 'title' - AND f.qualifier IS NULL - AND dbms_lob.compare(v.text_value, 'Administrator') = 0 OR 
dbms_lob.compare(v.text_value,'Anonymous') = 0 - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql deleted file mode 100644 index 18cb4a50841d..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3024 extremely slow searching when logged in as admin ---------------------------------------------------------------- --- This script will put the group name on the epersongroup --- record itself for performance reasons. It will also make --- sure that a group name is unique (so that for example no two --- Administrator groups can be created). ---------------------------------------------------------------- - -ALTER TABLE epersongroup -ADD name VARCHAR2(250); - -CREATE UNIQUE INDEX epersongroup_unique_idx_name on epersongroup(name); - -UPDATE epersongroup -SET name = -(SELECT text_value - FROM metadatavalue v - JOIN metadatafieldregistry field on v.metadata_field_id = field.metadata_field_id - JOIN metadataschemaregistry s ON field.metadata_schema_id = s.metadata_schema_id - WHERE s.short_id = 'dc' AND element = 'title' AND qualifier IS NULL - AND v.dspace_object_id = epersongroup.uuid); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql deleted file mode 100644 index e0a103749c2b..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1955 resize rpdescription for embargo reason ------------------------------------------------------- - --- We cannot alter type between varchar2 & clob directly so an in between column is required -ALTER TABLE resourcepolicy ADD rpdescription_clob CLOB; -UPDATE resourcepolicy SET rpdescription_clob=rpdescription, rpdescription=null; -ALTER TABLE resourcepolicy DROP COLUMN rpdescription; -ALTER TABLE resourcepolicy RENAME COLUMN rpdescription_clob TO rpdescription; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql deleted file mode 100644 index 7b13d10b6d4f..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql +++ /dev/null @@ -1,46 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3086 OAI Harvesting performance ---------------------------------------------------------------- --- This script will create indexes on the key fields of the --- metadataschemaregistry and metadatafieldregistry tables to --- increase the performance of the queries. It will also add --- "ON DELETE CASCADE" to improve the performance of Item deletion. 
---------------------------------------------------------------- - -CREATE UNIQUE INDEX metadataschema_idx_short_id on metadataschemaregistry(short_id); - -CREATE INDEX metadatafield_idx_elem_qual on metadatafieldregistry(element, qualifier); - -CREATE INDEX resourcepolicy_idx_rptype on resourcepolicy(rptype); - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE RESOURCEPOLICY ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE RESOURCEPOLICY SET DSPACE_OBJECT_NEW = DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY DROP COLUMN DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT; - -ALTER TABLE RESOURCEPOLICY -ADD CONSTRAINT RESOURCEPOLICY_DSPACE_OBJ_FK -FOREIGN KEY (DSPACE_OBJECT) -REFERENCES dspaceobject(uuid) -ON DELETE CASCADE; - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE METADATAVALUE ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE METADATAVALUE SET DSPACE_OBJECT_NEW = DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE DROP COLUMN DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT_ID; - -ALTER TABLE METADATAVALUE -ADD CONSTRAINT METADATAVALUE_DSPACE_OBJECT_FK -FOREIGN KEY (DSPACE_OBJECT_ID) -REFERENCES DSPACEOBJECT(UUID) -ON DELETE CASCADE; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql deleted file mode 100644 index a1b303f0365a..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql +++ /dev/null @@ -1,33 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3125 Submitters cannot delete bistreams of workspaceitems ---------------------------------------------------------------- --- This script will add delete rights on all bundles/bitstreams --- for people who already have REMOVE rights. --- In previous versions REMOVE rights was enough to ensure that --- you could delete an object. 
---------------------------------------------------------------- -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname, -rptype, rpdescription, eperson_id, epersongroup_id, dspace_object) -SELECT -resourcepolicy_seq.nextval AS policy_id, -resource_type_id, -resource_id, --- Insert the Constants.DELETE action -2 AS action_id, -start_date, -end_date, -rpname, -rptype, -rpdescription, -eperson_id, -epersongroup_id, -dspace_object -FROM resourcepolicy WHERE action_id=4 AND (resource_type_id=0 OR resource_type_id=1 OR resource_type_id=2); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql deleted file mode 100644 index 2ba3517e1988..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3168 Embargo request Unknown Entity RequestItem ---------------------------------------------------------------- --- convert the item_id and bitstream_id columns from integer to UUID ---------------------------------------------------------------- -ALTER TABLE requestitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE requestitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX requestitem_item on requestitem(item_id); -UPDATE requestitem SET item_id = (SELECT item.uuid FROM item WHERE requestitem.item_legacy_id = item.item_id); -ALTER TABLE requestitem DROP COLUMN item_legacy_id; - -ALTER TABLE requestitem RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE requestitem ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX requestitem_bitstream on requestitem(bitstream_id); -UPDATE requestitem SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE requestitem.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE requestitem DROP COLUMN bitstream_legacy_id; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql deleted file mode 100644 index 74783974468c..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2775 Drop unused sequences ------------------------------------------------------- - -DROP SEQUENCE bitstream_seq; -DROP SEQUENCE bundle2bitstream_seq; -DROP SEQUENCE bundle_seq; -DROP SEQUENCE collection2item_seq; -DROP SEQUENCE collection_seq; -DROP SEQUENCE community2collection_seq; -DROP SEQUENCE community2community_seq; -DROP SEQUENCE community_seq; 
-DROP SEQUENCE dcvalue_seq; -DROP SEQUENCE eperson_seq; -DROP SEQUENCE epersongroup2eperson_seq; -DROP SEQUENCE epersongroup2workspaceitem_seq; -DROP SEQUENCE epersongroup_seq; -DROP SEQUENCE group2group_seq; -DROP SEQUENCE group2groupcache_seq; -DROP SEQUENCE historystate_seq; -DROP SEQUENCE item2bundle_seq; -DROP SEQUENCE item_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql deleted file mode 100644 index 96f125f78b61..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql +++ /dev/null @@ -1,44 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------------------------------------- --- DS-3277 : 'handle_id' column needs its own separate sequence, so that Handles --- can be minted from 'handle_seq' ----------------------------------------------------------------------------------- --- Create a new sequence for 'handle_id' column. --- The role of this sequence is to simply provide a unique internal ID to the database. -CREATE SEQUENCE handle_id_seq; --- Initialize new 'handle_id_seq' to the maximum value of 'handle_id' -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(handle_id) INTO curr FROM handle; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_id_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_id_seq START WITH ' || NVL(curr,1); -END; -/ - --- Ensure the 'handle_seq' is updated to the maximum *suffix* in 'handle' column, --- as this sequence is used to mint new Handles. 
--- Code borrowed from update-sequences.sql and updateseq.sql -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(to_number(regexp_replace(handle, '.*/', ''), '999999999999')) INTO curr FROM handle WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$'); - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_seq START WITH ' || NVL(curr,1); -END; -/ \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql deleted file mode 100644 index e1220c8c7cce..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql deleted file mode 100644 index 5c3c3842aaea..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3410 ---------------------------------------------------------------- --- This script will create lost indexes ---------------------------------------------------------------- - -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql deleted file mode 100644 index 47b2d18be8a3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql +++ /dev/null @@ -1,16 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root 
of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 Handle of collections and communities are lost due to bug at V6.0_2015.03.07__DS-2701_Hibernate_migration.sql ------------------------------------------------------- - -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4) where handle.resource_type_id = 4; -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3) where handle.resource_type_id = 3; -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2) where handle.resource_type_id = 2; - \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql deleted file mode 100644 index 30cfae91c83a..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------------------------------------------------- --- This adds an extra column to the eperson table where we save a salt for stateless authentication ------------------------------------------------------------------------------------------------------------- -ALTER TABLE eperson ADD session_salt varchar(32); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql deleted file mode 100644 index fc1c0b2e2319..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql +++ /dev/null @@ -1,65 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the dspace 7 entities usage -------------------------------------------------------------- -CREATE SEQUENCE entity_type_id_seq; -CREATE SEQUENCE relationship_type_id_seq; -CREATE SEQUENCE relationship_id_seq; - -CREATE TABLE entity_type -( - id INTEGER NOT NULL PRIMARY KEY, - label varchar(32) UNIQUE NOT NULL -); - -CREATE TABLE relationship_type -( - id INTEGER NOT NULL PRIMARY KEY, - left_type INTEGER NOT NULL, - right_type INTEGER NOT NULL, - left_label varchar(32) NOT NULL, - right_label varchar(32) NOT NULL, - left_min_cardinality INTEGER, - left_max_cardinality INTEGER, - right_min_cardinality INTEGER, - right_max_cardinality INTEGER, - FOREIGN KEY (left_type) REFERENCES entity_type(id), - FOREIGN KEY (right_type) REFERENCES entity_type(id), - CONSTRAINT u_relationship_type_constraint UNIQUE (left_type, right_type, left_label, right_label) - -); - -CREATE TABLE relationship -( - id INTEGER NOT NULL PRIMARY KEY, - left_id raw(16) NOT NULL REFERENCES item(uuid), - type_id INTEGER NOT NULL REFERENCES relationship_type(id), - right_id raw(16) NOT NULL REFERENCES item(uuid), - left_place INTEGER, - right_place INTEGER, - CONSTRAINT u_constraint UNIQUE (left_id, type_id, right_id) - -); - -CREATE INDEX entity_type_label_idx ON entity_type(label); -CREATE INDEX rl_ty_by_left_type_idx ON relationship_type(left_type); -CREATE INDEX rl_ty_by_right_type_idx ON relationship_type(right_type); -CREATE INDEX rl_ty_by_left_label_idx ON relationship_type(left_label); -CREATE INDEX rl_ty_by_right_label_idx ON relationship_type(right_label); -CREATE INDEX relationship_by_left_id_idx ON relationship(left_id); -CREATE INDEX relationship_by_right_id_idx ON relationship(right_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql deleted file mode 100644 index 68ed690f89e8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ----------------------------------------------------------------------------------------------------------------- --- This adds TYPE_INHERITED to all old archived items permission due to the change on resource policy management ----------------------------------------------------------------------------------------------------------------- -UPDATE resourcepolicy set rptype = 'TYPE_INHERITED' - where resource_type_id = 2 and rptype is null - and dspace_object in ( - select uuid from item where in_archive = 1 - ); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql deleted file mode 100644 index b23170f43732..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-4239 Migrate the workflow.xml to spring ---------------------------------------------------------------- --- This script will rename the default workflow "default" name --- to the new "defaultWorkflow" identifier ---------------------------------------------------------------- - -UPDATE cwf_pooltask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; -UPDATE cwf_claimtask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql deleted file mode 100644 index cebae09f651c..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns leftwardValue and rightwardValue in table relationship --- Rename columns left_label and right_label to leftward_type and rightward_type ------------------------------------------------------------------------------------ - -ALTER TABLE relationship ADD leftward_value VARCHAR2(50); -ALTER TABLE relationship ADD rightward_value VARCHAR2(50); - -ALTER TABLE relationship_type RENAME COLUMN left_label TO leftward_type; -ALTER TABLE relationship_type RENAME COLUMN right_label TO rightward_type; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql deleted file mode 100644 index 0db294c1c13a..000000000000 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql +++ /dev/null @@ -1,14 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns copy_left and copy_right for RelationshipType ------------------------------------------------------------------------------------ - -ALTER TABLE relationship_type ADD copy_to_left NUMBER(1) DEFAULT 0 NOT NULL; -ALTER TABLE relationship_type ADD copy_to_right NUMBER(1) DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql deleted file mode 100644 index a7015e3033bf..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql +++ /dev/null @@ -1,40 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== -CREATE SEQUENCE process_id_seq; - -CREATE TABLE process -( - process_id INTEGER NOT NULL PRIMARY KEY, - user_id RAW(16) NOT NULL, - start_time TIMESTAMP, - finished_time TIMESTAMP, - creation_time TIMESTAMP NOT NULL, - script VARCHAR(256) NOT NULL, - status VARCHAR(32), - parameters VARCHAR(512) -); - -CREATE TABLE process2bitstream -( - process_id INTEGER REFERENCES process(process_id), - bitstream_id RAW(16) REFERENCES bitstream(uuid), - CONSTRAINT PK_process2bitstream PRIMARY KEY (process_id, bitstream_id) -); - -CREATE INDEX process_user_id_idx ON process(user_id); -CREATE INDEX process_status_idx ON process(status); -CREATE INDEX process_name_idx on process(script); -CREATE INDEX process_start_time_idx on process(start_time); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql deleted file mode 100644 index a108fd74b468..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. 
IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the IRUS statistics harvester -------------------------------------------------------------- - -CREATE SEQUENCE openurltracker_seq; - -CREATE TABLE openurltracker -( - tracker_id NUMBER, - tracker_url VARCHAR2(1000), - uploaddate DATE, - CONSTRAINT openurltracker_PK PRIMARY KEY (tracker_id) -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql deleted file mode 100644 index f71173abe607..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Drop the 'workflowitem' and 'tasklistitem' tables ------------------------------------------------------------------------------------ - -DROP TABLE workflowitem CASCADE CONSTRAINTS; -DROP TABLE tasklistitem CASCADE CONSTRAINTS; - -DROP SEQUENCE workflowitem_seq; -DROP SEQUENCE tasklistitem_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql deleted file mode 100644 index 95d07be477d5..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql +++ /dev/null @@ -1,13 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns copy_left and copy_right for RelationshipType ------------------------------------------------------------------------------------ - -ALTER TABLE relationship_type ADD tilted INTEGER; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql deleted file mode 100644 index 9c39091f89dc..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql +++ /dev/null @@ -1,56 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE 
MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- --- Move all 'relationship.type' metadata fields to 'dspace.entity.type'. Remove 'relationship' schema. -------------------------------------------------------------------------------------------------------- --- Special case: we need to the 'dspace' schema to already exist. If users don't already have it we must create it --- manually via SQL, as by default it won't be created until database updates are finished. -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) - SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/dspace' as namespace, 'dspace' as short_id FROM dual - WHERE NOT EXISTS - (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry - WHERE namespace = 'http://dspace.org/dspace' AND short_id = 'dspace'); - - --- Add 'dspace.entity.type' field to registry (if missing) -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace'), 'entity', 'type' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entitye' AND qualifier='type'); - --- Moves all 'relationship.type' field values to a new 'dspace.entity.type' field -UPDATE metadatavalue - SET metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entity' AND qualifier='type') - WHERE metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='relationship') - AND element = 'type' AND qualifier is NULL); - - --- Delete 'relationship.type' field from registry -DELETE FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id = 'relationship') - AND element = 'type' AND qualifier is NULL; - --- Delete 'relationship' schema (which is now empty) -DELETE FROM metadataschemaregistry WHERE short_id = 'relationship' AND namespace = 'http://dspace.org/relationship'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql deleted file mode 100644 index 5a6abda04101..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql +++ /dev/null @@ -1,28 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING 
WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------------------------- -UPDATE metadatavalue SET dspace_object_id = (SELECT uuid - FROM collection - WHERE template_item_id = dspace_object_id) -WHERE dspace_object_id IN (SELECT template_item_id - FROM Collection) - AND metadata_field_id - IN (SELECT metadata_field_id - FROM metadatafieldregistry mfr LEFT JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE msr.short_id = 'dspace' AND mfr.element = 'entity' AND mfr.qualifier = 'type'); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql deleted file mode 100644 index ae8f1e7ef5d2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql +++ /dev/null @@ -1,15 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------------- ----- ALTER table collection -------------------------------------------------------------------------------------- - -ALTER TABLE collection DROP COLUMN workflow_step_1; -ALTER TABLE collection DROP COLUMN workflow_step_2; -ALTER TABLE collection DROP COLUMN workflow_step_3; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql deleted file mode 100644 index 9c39c15e66e2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Make sure the metadatavalue.place column starts at 0 instead of 1 ----------------------------------------------------- -MERGE INTO metadatavalue mdv -USING ( - SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace - FROM metadatavalue - GROUP BY dspace_object_id, metadata_field_id -) mp -ON ( - mdv.dspace_object_id = mp.dspace_object_id - AND mdv.metadata_field_id = mp.metadata_field_id - AND mp.minplace > 0 -) -WHEN MATCHED THEN UPDATE -SET mdv.place = mdv.place - mp.minplace; \ No newline at end of file diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql deleted file mode 100644 index 3fe424cf6cfc..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql +++ /dev/null @@ -1,54 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create tables for ORCID Queue and History ------------------------------------------------------------------------------------ - -CREATE SEQUENCE orcid_queue_id_seq; - -CREATE TABLE orcid_queue -( - id INTEGER NOT NULL, - owner_id RAW(16) NOT NULL, - entity_id RAW(16), - put_code VARCHAR(255), - record_type VARCHAR(255), - description VARCHAR(255), - operation VARCHAR(255), - metadata CLOB, - attempts INTEGER, - CONSTRAINT orcid_queue_pkey PRIMARY KEY (id), - CONSTRAINT orcid_queue_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), - CONSTRAINT orcid_queue_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) -); - -CREATE INDEX orcid_queue_owner_id_index on orcid_queue(owner_id); - - -CREATE SEQUENCE orcid_history_id_seq; - -CREATE TABLE orcid_history -( - id INTEGER NOT NULL, - owner_id RAW(16) NOT NULL, - entity_id RAW(16), - put_code VARCHAR(255), - timestamp_last_attempt TIMESTAMP, - response_message CLOB, - status INTEGER, - metadata CLOB, - operation VARCHAR(255), - record_type VARCHAR(255), - description VARCHAR(255), - CONSTRAINT orcid_history_pkey PRIMARY KEY (id), - CONSTRAINT orcid_history_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), - CONSTRAINT orcid_history_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) -); - -CREATE INDEX orcid_history_owner_id_index on orcid_history(owner_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql deleted file mode 100644 index 14bf8531439f..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create table for ORCID access tokens ------------------------------------------------------------------------------------ - -CREATE SEQUENCE orcid_token_id_seq; - -CREATE TABLE orcid_token -( - id INTEGER NOT NULL, - eperson_id RAW(16) NOT NULL UNIQUE, - profile_item_id RAW(16), - access_token VARCHAR2(100) NOT NULL, - CONSTRAINT orcid_token_pkey PRIMARY KEY (id), - CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), - CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) -); diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql deleted file mode 100644 index 0e7d417ae52d..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------- --- Table to store Groups related to a Process on its creation -------------------------------------------------------------------------------- - -CREATE TABLE Process2Group -( - process_id INTEGER REFERENCES Process(process_id), - group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE, - CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id) -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql deleted file mode 100644 index 3eb9ae6dd4f8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql +++ /dev/null @@ -1,10 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) -ALTER TABLE relationship ADD latest_version_status INTEGER DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql deleted file mode 100644 index 3862830230e3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql +++ /dev/null @@ -1,45 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- ADD table subscription_parameter ------------------------------------------------------------------------------------ - - -CREATE SEQUENCE if NOT EXISTS subscription_parameter_seq; ------------------------------------------------------------------------------------ --- ADD table subscription_parameter ------------------------------------------------------------------------------------ -CREATE TABLE if NOT EXISTS subscription_parameter -( - subscription_parameter_id INTEGER NOT NULL, - name 
VARCHAR(255), - value VARCHAR(255), - subscription_id INTEGER NOT NULL, - CONSTRAINT subscription_parameter_pkey PRIMARY KEY (subscription_parameter_id), - CONSTRAINT subscription_parameter_subscription_fkey FOREIGN KEY (subscription_id) - REFERENCES subscription (subscription_id) ON DELETE CASCADE -); --- -- - -ALTER TABLE subscription ADD COLUMN if NOT EXISTS dspace_object_id UUID; ----- -- -ALTER TABLE subscription ADD COLUMN if NOT EXISTS type CHARACTER VARYING(255); --- -UPDATE subscription SET dspace_object_id = collection_id , type = 'content'; --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_dspaceobject_fkey; -ALTER TABLE subscription ADD CONSTRAINT subscription_dspaceobject_fkey FOREIGN KEY (dspace_object_id) REFERENCES dspaceobject (uuid); --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_collection_id_fkey; ----- -- -ALTER TABLE subscription DROP COLUMN IF EXISTS collection_id; --- -- -INSERT INTO subscription_parameter (subscription_parameter_id, name, value, subscription_id) -SELECT getnextid('subscription_parameter'), 'frequency', 'D', subscription_id from "subscription" ; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql deleted file mode 100644 index c7bb0b502ec2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql +++ /dev/null @@ -1,78 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------- --- Table to store supervision orders -------------------------------------------------------------------------------- - -CREATE TABLE supervision_orders -( - id INTEGER PRIMARY KEY, - item_id UUID REFERENCES Item(uuid) ON DELETE CASCADE, - eperson_group_id UUID REFERENCES epersongroup(uuid) ON DELETE CASCADE -); - -CREATE SEQUENCE supervision_orders_seq; - -INSERT INTO supervision_orders (id, item_id, eperson_group_id) -SELECT supervision_orders_seq.nextval AS id, w.item_id, e.uuid -FROM epersongroup2workspaceitem ew INNER JOIN workspaceitem w -ON ew.workspace_item_id = w.workspace_item_id -INNER JOIN epersongroup e -ON ew.eperson_group_id = e.uuid; - - --- UPDATE policies for supervision orders --- items, bundles and bitstreams - -DECLARE -BEGIN - -FOR rec IN -( -SELECT so.item_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN RESOURCEPOLICY rp on so.item_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT ib.bundle_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN RESOURCEPOLICY rp on ib.bundle_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT bs.bitstream_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN bundle2bitstream bs ON ib.bundle_id = bs.bundle_id -INNER JOIN RESOURCEPOLICY rp on bs.bitstream_id = rp.dspace_object 
-AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL -) - -LOOP - -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_SUBMISSION' -where dspace_object = rec.dspace_object -AND epersongroup_id = rec.eperson_group_id -AND rptype IS NULL; - -END LOOP; -END; - -------------------------------------------------------------------------------- --- drop epersongroup2workspaceitem table -------------------------------------------------------------------------------- - -DROP TABLE epersongroup2workspaceitem; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql deleted file mode 100644 index 9d13138fdada..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql +++ /dev/null @@ -1,22 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create table for System wide alerts ------------------------------------------------------------------------------------ - -CREATE SEQUENCE alert_id_seq; - -CREATE TABLE systemwidealert -( - alert_id INTEGER NOT NULL PRIMARY KEY, - message VARCHAR(512), - allow_sessions VARCHAR(64), - countdown_to TIMESTAMP, - active BOOLEAN -); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql deleted file mode 100644 index 8aec44a7f6f2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Update short description for PNG mimetype in the bitstream format registry --- See: https://github.com/DSpace/DSpace/pull/8722 ------------------------------------------------------------------------------------ - -UPDATE bitstreamformatregistry -SET short_description='PNG' -WHERE short_description='image/png' - AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql deleted file mode 100644 index 509e0a286935..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql +++ /dev/null @@ -1,10 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- 
http://www.dspace.org/license/ --- - -ALTER TABLE orcid_history MODIFY (description CLOB); -ALTER TABLE orcid_queue MODIFY (description CLOB); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql deleted file mode 100644 index b4d4d755cbe7..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql +++ /dev/null @@ -1,77 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- SQL code to update the ID (primary key) generating sequences, if some --- import operation has set explicit IDs. --- --- Sequences are used to generate IDs for new rows in the database. If a --- bulk import operation, such as an SQL dump, specifies primary keys for --- imported data explicitly, the sequences are out of sync and need updating. --- This SQL code does just that. --- --- This should rarely be needed; any bulk import should be performed using the --- org.dspace.content API which is safe to use concurrently and in multiple --- JVMs. The SQL code below will typically only be required after a direct --- SQL data dump from a backup or somesuch. - --- The 'updateseq' procedure was derived from incseq.sql found at: --- http://www.akadia.com/services/scripts/incseq.sql - -DECLARE - PROCEDURE updateseq ( seq IN VARCHAR, - tbl IN VARCHAR, - attr IN VARCHAR, - cond IN VARCHAR DEFAULT '' ) IS - curr NUMBER := 0; - BEGIN - EXECUTE IMMEDIATE 'SELECT max(' || attr - || ') FROM ' || tbl - || ' ' || cond - INTO curr; - curr := curr + 1; - EXECUTE IMMEDIATE 'DROP SEQUENCE ' || seq; - EXECUTE IMMEDIATE 'CREATE SEQUENCE ' - || seq - || ' START WITH ' - || NVL(curr, 1); - END updateseq; - -BEGIN - updateseq('bitstreamformatregistry_seq', 'bitstreamformatregistry', - 'bitstream_format_id'); - updateseq('fileextension_seq', 'fileextension', 'file_extension_id'); - updateseq('resourcepolicy_seq', 'resourcepolicy', 'policy_id'); - updateseq('workspaceitem_seq', 'workspaceitem', 'workspace_item_id'); - updateseq('registrationdata_seq', 'registrationdata', - 'registrationdata_id'); - updateseq('subscription_seq', 'subscription', 'subscription_id'); - updateseq('metadatafieldregistry_seq', 'metadatafieldregistry', - 'metadata_field_id'); - updateseq('metadatavalue_seq', 'metadatavalue', 'metadata_value_id'); - updateseq('metadataschemaregistry_seq', 'metadataschemaregistry', - 'metadata_schema_id'); - updateseq('harvested_collection_seq', 'harvested_collection', 'id'); - updateseq('harvested_item_seq', 'harvested_item', 'id'); - updateseq('webapp_seq', 'webapp', 'webapp_id'); - updateseq('requestitem_seq', 'requestitem', 'requestitem_id'); - updateseq('handle_id_seq', 'handle', 'handle_id'); - - -- Handle Sequence is a special case. Since Handles minted by DSpace - -- use the 'handle_seq', we need to ensure the next assigned handle - -- will *always* be unique. So, 'handle_seq' always needs to be set - -- to the value of the *largest* handle suffix. That way when the - -- next handle is assigned, it will use the next largest number. 
This - -- query does the following: - -- For all 'handle' values which have a number in their suffix - -- (after '/'), find the maximum suffix value, convert it to a - -- number, and set the 'handle_seq' to start at the next value (see - -- updateseq above for more). - updateseq('handle_seq', 'handle', - q'{to_number(regexp_replace(handle, '.*/', ''), '999999999999')}', - q'{WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$')}'); -END; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql deleted file mode 100644 index 9bca3a17c99e..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - 
SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - 
owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND resource_id = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = 
i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN 
metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - 
AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql deleted file mode 100644 index 917078594cfa..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql +++ /dev/null @@ -1,37 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- --- Alter workflow item -ALTER TABLE workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE workflowitem SET item_id = (SELECT item.uuid FROM item WHERE workflowitem.item_legacy_id = item.item_id); -ALTER TABLE workflowitem DROP COLUMN item_legacy_id; - --- Migrate task list item -ALTER TABLE TasklistItem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE TasklistItem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -UPDATE TasklistItem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE TasklistItem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE TasklistItem DROP COLUMN eperson_legacy_id; - --- Migrate task workflow item -ALTER TABLE workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workflowitem DROP COLUMN collection_legacy_id; -ALTER TABLE workflowitem RENAME COLUMN owner to owner_legacy_id; -ALTER TABLE workflowitem ADD owner RAW(16) REFERENCES EPerson 
(uuid); -UPDATE workflowitem SET owner = (SELECT eperson.uuid FROM eperson WHERE workflowitem.owner_legacy_id = eperson.eperson_id); -ALTER TABLE workflowitem DROP COLUMN owner_legacy_id; -UPDATE workflowitem SET state = -1 WHERE state IS NULL; -UPDATE workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql deleted file mode 100644 index b3887a5af4d1..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - 
resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS 
dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND dspace_object = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = 
i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = 
mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND 
mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql deleted file mode 100644 index 7a992836eea6..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql +++ /dev/null @@ -1,141 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -UPDATE collection SET workflow_step_1 = null; -UPDATE collection SET workflow_step_2 = null; -UPDATE collection SET workflow_step_3 = null; - --- cwf_workflowitem - -DROP INDEX cwf_workflowitem_coll_fk_idx; - -ALTER TABLE cwf_workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE cwf_workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE cwf_workflowitem SET item_id = (SELECT item.uuid FROM item WHERE cwf_workflowitem.item_legacy_id = item.item_id); -ALTER TABLE cwf_workflowitem DROP COLUMN item_legacy_id; - -ALTER TABLE cwf_workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE cwf_workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_workflowitem DROP COLUMN collection_legacy_id; - -UPDATE cwf_workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE cwf_workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE cwf_workflowitem SET multiple_files = '0' WHERE multiple_files 
IS NULL; - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - --- cwf_collectionrole - -ALTER TABLE cwf_collectionrole DROP CONSTRAINT cwf_collectionrole_unique; -DROP INDEX cwf_cr_coll_role_fk_idx; -DROP INDEX cwf_cr_coll_fk_idx; - -ALTER TABLE cwf_collectionrole RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE cwf_collectionrole ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_collectionrole SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_collectionrole.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_collectionrole DROP COLUMN collection_legacy_id; - -ALTER TABLE cwf_collectionrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_collectionrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_collectionrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_collectionrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_collectionrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - --- cwf_workflowitemrole - -ALTER TABLE cwf_workflowitemrole DROP CONSTRAINT cwf_workflowitemrole_unique; -DROP INDEX cwf_wfir_item_role_fk_idx; -DROP INDEX cwf_wfir_item_fk_idx; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_workflowitemrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_workflowitemrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_workflowitemrole SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_workflowitemrole.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN eperson_legacy_id; - - -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - --- cwf_pooltask - -DROP INDEX cwf_pt_eperson_fk_idx; -DROP INDEX cwf_pt_workflow_eperson_fk_idx; - -ALTER TABLE cwf_pooltask RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_pooltask ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_pooltask SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_pooltask.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_pooltask DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_pooltask RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_pooltask ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_pooltask SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_pooltask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_pooltask DROP COLUMN eperson_legacy_id; - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - --- cwf_claimtask 
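-- For reference: a minimal PostgreSQL-flavoured sketch of the conversion pattern the
-- removed Oracle statements below follow for cwf_claimtask (rename the legacy integer
-- FK, add a UUID FK, backfill it, drop the legacy column). The retained postgres
-- migration is authoritative; the native UUID column type here is an assumption.
ALTER TABLE cwf_claimtask RENAME COLUMN owner_id TO eperson_legacy_id;
ALTER TABLE cwf_claimtask ADD COLUMN owner_id UUID REFERENCES eperson(uuid);
UPDATE cwf_claimtask
   SET owner_id = (SELECT eperson.uuid FROM eperson
                    WHERE cwf_claimtask.eperson_legacy_id = eperson.eperson_id);
ALTER TABLE cwf_claimtask DROP COLUMN eperson_legacy_id;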
- -ALTER TABLE cwf_claimtask DROP CONSTRAINT cwf_claimtask_unique; -DROP INDEX cwf_ct_workflow_fk_idx; -DROP INDEX cwf_ct_workflow_eperson_fk_idx; -DROP INDEX cwf_ct_eperson_fk_idx; -DROP INDEX cwf_ct_wfs_fk_idx; -DROP INDEX cwf_ct_wfs_action_fk_idx; -DROP INDEX cwf_ct_wfs_action_e_fk_idx; - -ALTER TABLE cwf_claimtask RENAME COLUMN owner_id to eperson_legacy_id; -ALTER TABLE cwf_claimtask ADD owner_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_claimtask SET owner_id = (SELECT eperson.uuid FROM eperson WHERE cwf_claimtask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_claimtask DROP COLUMN eperson_legacy_id; - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - --- cwf_in_progress_user - -ALTER TABLE cwf_in_progress_user DROP CONSTRAINT cwf_in_progress_user_unique; -DROP INDEX cwf_ipu_workflow_fk_idx; -DROP INDEX cwf_ipu_eperson_fk_idx; - -ALTER TABLE cwf_in_progress_user RENAME COLUMN user_id to eperson_legacy_id; -ALTER TABLE cwf_in_progress_user ADD user_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_in_progress_user SET user_id = (SELECT eperson.uuid FROM eperson WHERE cwf_in_progress_user.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_in_progress_user DROP COLUMN eperson_legacy_id; -UPDATE cwf_in_progress_user SET finished = '0' WHERE finished IS NULL; - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql deleted file mode 100644 index 0402fc994887..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql +++ /dev/null @@ -1,27 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- UPDATE policies for claimtasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id JOIN item ON cwf_workflowitem.item_id = item.uuid) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT item2bundle.bundle_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON 
cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT bundle2bitstream.bitstream_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Create policies for pooled tasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql deleted file mode 100644 index f582f37c6931..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? 
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, 
step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE resource_type_id = 2 AND resource_id IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 1 AND resource_id IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 0 AND resource_id IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! --- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, 
resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql deleted file mode 100644 index 70eb419d8fbb..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? 
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) 
-SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! --- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS 
policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql deleted file mode 100644 index 541af73dfe01..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow (for DSpace 6.0) --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. That step is performed by the corresponding --- "data_workflow_migration.sql" script. 
--- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id RAW(16) REFERENCES item(uuid) UNIQUE, - collection_id RAW(16) REFERENCES collection(uuid), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id RAW(16) REFERENCES collection(uuid), -group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id RAW(16) REFERENCES eperson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id RAW(16) REFERENCES EPerson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id RAW(16) REFERENCES eperson(uuid) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE 
cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id RAW(16) REFERENCES eperson(uuid), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql deleted file mode 100644 index f8f0e564e824..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. That step is performed by the corresponding --- "data_workflow_migration.sql" script. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES item(item_id) UNIQUE, - collection_id INTEGER REFERENCES collection(collection_id), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id integer REFERENCES collection(collection_id), -group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id integer REFERENCES eperson(eperson_id), - group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE 
INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id INTEGER REFERENCES EPerson(eperson_id), - group_id INTEGER REFERENCES epersongroup(eperson_group_id) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id integer REFERENCES eperson(eperson_id) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id integer REFERENCES eperson(eperson_id), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - From 7a209d228aef5689132656a35dca7849b99813b9 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 3 May 2023 13:37:36 -0500 Subject: [PATCH 154/686] Remove oracle specific Java code / comments. --- .../org/dspace/eperson/GroupServiceImpl.java | 2 - .../dspace/storage/rdbms/DatabaseUtils.java | 50 ++----------------- .../rdbms/migration/MigrationUtils.java | 16 ------ ...Drop_constraint_for_DSpace_1_4_schema.java | 5 +- ...Drop_constraint_for_DSpace_1_6_schema.java | 5 +- ...adata_For_All_Objects_drop_constraint.java | 5 +- ...4_11_04__Enable_XMLWorkflow_Migration.java | 2 - ..._DS_2701_Enable_XMLWorkflow_Migration.java | 2 - .../postgres/upgradeToFlyway4x.sql | 2 +- 9 files changed, 12 insertions(+), 77 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index f3dc6ca36a65..607e57af0b2c 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -353,8 +353,6 @@ public Set allMemberGroupsSet(Context context, EPerson ePerson) throws SQ List groupCache = group2GroupCacheDAO.findByChildren(context, groups); // now we have all owning groups, also grab all parents of owning groups - // yes, I know this could have been done as one big query and a union, - // but doing the Oracle port taught me to keep to simple SQL! 
for (Group2GroupCache group2GroupCache : groupCache) { groups.add(group2GroupCache.getParent()); } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java index 1464fb44ecda..89010a73087f 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java @@ -75,7 +75,6 @@ public class DatabaseUtils { // Types of databases supported by DSpace. See getDbType() public static final String DBMS_POSTGRES = "postgres"; - public static final String DBMS_ORACLE = "oracle"; public static final String DBMS_H2 = "h2"; // Name of the table that Flyway uses for its migration history @@ -369,9 +368,7 @@ public static void main(String[] argv) { .println("\nWARNING: ALL DATA AND TABLES IN YOUR DATABASE WILL BE PERMANENTLY DELETED.\n"); System.out.println("There is NO turning back from this action. Backup your DB before " + "continuing."); - if (dbType.equals(DBMS_ORACLE)) { - System.out.println("\nORACLE WARNING: your RECYCLEBIN will also be PURGED.\n"); - } else if (dbType.equals(DBMS_POSTGRES)) { + if (dbType.equals(DBMS_POSTGRES)) { System.out.println( "\nPOSTGRES WARNING: the '" + PostgresUtils.PGCRYPTO + "' extension will be dropped " + "if it is in the same schema as the DSpace database.\n"); @@ -467,11 +464,10 @@ private static void printDBInfo(Connection connection) throws SQLException { DatabaseMetaData meta = connection.getMetaData(); String dbType = getDbType(connection); System.out.println("\nDatabase Type: " + dbType); - if (dbType.equals(DBMS_ORACLE)) { - System.out.println("===================================="); - System.out.println("WARNING: Oracle support is deprecated!"); - System.out.println("See https://github.com/DSpace/DSpace/issues/8214"); - System.out.println("====================================="); + if (!dbType.equals(DBMS_POSTGRES) && !dbType.equals(DBMS_H2)) { + System.err.println("===================================="); + System.err.println("ERROR: Database type " + dbType + " is UNSUPPORTED!"); + System.err.println("====================================="); } System.out.println("Database URL: " + meta.getURL()); System.out.println("Database Schema: " + getSchemaName(connection)); @@ -606,10 +602,6 @@ private synchronized static FluentConfiguration setupFlyway(DataSource datasourc String dbType = getDbType(connection); connection.close(); - if (dbType.equals(DBMS_ORACLE)) { - log.warn("ORACLE SUPPORT IS DEPRECATED! See https://github.com/DSpace/DSpace/issues/8214"); - } - // Determine location(s) where Flyway will load all DB migrations ArrayList scriptLocations = new ArrayList<>(); @@ -946,26 +938,6 @@ private static synchronized void cleanDatabase(Flyway flyway, DataSource dataSou // First, run Flyway's clean command on database. // For MOST database types, this takes care of everything flyway.clean(); - - try (Connection connection = dataSource.getConnection()) { - // Get info about which database type we are using - String dbType = getDbType(connection); - - // If this is Oracle, the only way to entirely clean the database - // is to also purge the "Recyclebin". 
See: - // http://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_9018.htm - if (dbType.equals(DBMS_ORACLE)) { - PreparedStatement statement = null; - try { - statement = connection.prepareStatement("PURGE RECYCLEBIN"); - statement.executeQuery(); - } finally { - if (statement != null && !statement.isClosed()) { - statement.close(); - } - } - } - } } catch (FlywayException fe) { // If any FlywayException (Runtime) is thrown, change it to a SQLException throw new SQLException("Flyway clean error occurred", fe); @@ -1214,11 +1186,6 @@ public static boolean sequenceExists(Connection connection, String sequenceName) // We need to filter by schema in PostgreSQL schemaFilter = true; break; - case DBMS_ORACLE: - // Oracle specific query for a sequence owned by our current DSpace user - // NOTE: No need to filter by schema for Oracle, as Schema = User - sequenceSQL = "SELECT COUNT(1) FROM user_sequences WHERE sequence_name=?"; - break; case DBMS_H2: // In H2, sequences are listed in the "information_schema.sequences" table // SEE: http://www.h2database.com/html/grammar.html#information_schema @@ -1322,11 +1289,6 @@ public static String getSchemaName(Connection connection) // For PostgreSQL, the default schema is named "public" // See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html schema = "public"; - } else if (dbType.equals(DBMS_ORACLE)) { - // For Oracle, default schema is actually the user account - // See: http://stackoverflow.com/a/13341390 - DatabaseMetaData meta = connection.getMetaData(); - schema = meta.getUserName(); } else { // For H2 (in memory), there is no such thing as a schema schema = null; @@ -1552,8 +1514,6 @@ public static String getDbType(Connection connection) String dbms_lc = prodName.toLowerCase(Locale.ROOT); if (dbms_lc.contains("postgresql")) { return DBMS_POSTGRES; - } else if (dbms_lc.contains("oracle")) { - return DBMS_ORACLE; } else if (dbms_lc.contains("h2")) { // Used for unit testing only return DBMS_H2; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java index 842fc15e1657..f0c4e4e17990 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java @@ -78,13 +78,6 @@ protected static Integer dropDBConstraint(Connection connection, String tableNam constraintName += "_" + StringUtils.lowerCase(constraintSuffix); cascade = true; break; - case "oracle": - // In Oracle, constraints are listed in the USER_CONS_COLUMNS table - constraintNameSQL = "SELECT CONSTRAINT_NAME " + - "FROM USER_CONS_COLUMNS " + - "WHERE TABLE_NAME = ? 
AND COLUMN_NAME = ?"; - cascade = true; - break; case "h2": // In H2, column constraints are listed in the "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" table constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " + @@ -160,9 +153,6 @@ protected static Integer dropDBTable(Connection connection, String tableName) case "postgresql": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; - case "oracle": - dropTableSQL = "DROP TABLE " + tableName + " CASCADE CONSTRAINTS"; - break; case "h2": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; @@ -208,9 +198,6 @@ protected static Integer dropDBSequence(Connection connection, String sequenceNa case "postgresql": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; - case "oracle": - dropSequenceSQL = "DROP SEQUENCE " + sequenceName; - break; case "h2": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; @@ -256,9 +243,6 @@ protected static Integer dropDBView(Connection connection, String viewName) case "postgresql": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; - case "oracle": - dropViewSQL = "DROP VIEW " + viewName + " CASCADE CONSTRAINTS"; - break; case "h2": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java index 56c5b474d9fc..758e745ddc86 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java @@ -19,10 +19,9 @@ * of the "community" table. This is necessary for the upgrade from 1.3 to 1.4 *
* This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *
* NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java index 6d82055e530e..37100a17f926 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java @@ -19,10 +19,9 @@ * from 1.5 to 1.6 *

* This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

* NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java index ea72d99b6e29..8e2be91127c8 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java @@ -20,10 +20,9 @@ * this column must be renamed to "resource_id". *

* This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

* NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java index b3306a9fc93c..0361e6805356 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java @@ -67,8 +67,6 @@ public void migrate(Context context) String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java index 9aa0f4877c39..4c1cf3365395 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java @@ -46,8 +46,6 @@ public void migrate(Context context) throws Exception { String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql index 7548fa4c6acb..edebe6e087fb 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql @@ -15,7 +15,7 @@ -- ----------------- -- This is the PostgreSQL upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql +-- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/postgresql/upgradeMetaDataTable.sql -- -- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------ From dac7ed2f09e0d8c9ae4931910944ef32deccc924 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 3 May 2023 14:47:16 -0500 Subject: [PATCH 155/686] Migrate to org.hibernate.type.TextType from our custom LobType --- .../org/dspace/authorize/ResourcePolicy.java | 2 +- .../org/dspace/content/MetadataValue.java | 2 +- .../java/org/dspace/orcid/OrcidHistory.java | 6 +- .../java/org/dspace/orcid/OrcidQueue.java | 4 +- .../main/java/org/dspace/scripts/Process.java | 2 +- .../rdbms/hibernate/DatabaseAwareLobType.java | 57 ------------------- 6 files changed, 8 insertions(+), 65 deletions(-) delete mode 100644 
dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java index 38b6aef45bc2..c781400bae45 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java @@ -100,7 +100,7 @@ public class ResourcePolicy implements ReloadableEntity { private String rptype; @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "rpdescription") private String rpdescription; diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java index c3deaacd804c..31479e620618 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java @@ -60,7 +60,7 @@ public class MetadataValue implements ReloadableEntity { * The value of the field */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "text_value") private String value; diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java index a567c6e7a7fe..07a79384c77c 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java @@ -80,7 +80,7 @@ public class OrcidHistory implements ReloadableEntity { * A description of the synchronized resource. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -89,7 +89,7 @@ public class OrcidHistory implements ReloadableEntity { * the owner itself. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "metadata") private String metadata; @@ -104,7 +104,7 @@ public class OrcidHistory implements ReloadableEntity { * The response message incoming from ORCID. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "response_message") private String responseMessage; diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java index 9261f14eea24..65b66cd20c3e 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java @@ -65,7 +65,7 @@ public class OrcidQueue implements ReloadableEntity { * A description of the resource to be synchronized. 
*/ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -89,7 +89,7 @@ public class OrcidQueue implements ReloadableEntity { */ @Lob @Column(name = "metadata") - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") private String metadata; /** diff --git a/dspace-api/src/main/java/org/dspace/scripts/Process.java b/dspace-api/src/main/java/org/dspace/scripts/Process.java index 190d214a3c8d..eab3ba460c09 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/Process.java +++ b/dspace-api/src/main/java/org/dspace/scripts/Process.java @@ -71,7 +71,7 @@ public class Process implements ReloadableEntity { private ProcessStatus processStatus; @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "parameters") private String parameters; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java deleted file mode 100644 index 95939f9902aa..000000000000 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java +++ /dev/null @@ -1,57 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.storage.rdbms.hibernate; - -import org.apache.commons.lang.StringUtils; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; -import org.hibernate.type.AbstractSingleColumnStandardBasicType; -import org.hibernate.type.descriptor.java.StringTypeDescriptor; -import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; -import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor; -import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; - -/** - * A Hibernate @Type used to properly support the CLOB in both Postgres and Oracle. - * PostgreSQL doesn't have a CLOB type, instead it's a TEXT field. - * Normally, you'd use org.hibernate.type.TextType to support TEXT, but that won't work for Oracle. - * https://github.com/hibernate/hibernate-orm/blob/5.6/hibernate-core/src/main/java/org/hibernate/type/TextType.java - * - * This Type checks if we are using PostgreSQL. - * If so, it configures Hibernate to map CLOB to LongVarChar (same as org.hibernate.type.TextType) - * If not, it uses default CLOB (which works for other databases). 
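For reference, the replacement mapping is simply Hibernate's stock text type, exactly as the hunks above declare it. A minimal sketch of such a field on a hypothetical entity (the class, field and column names are illustrative only and are not taken from this patch):

    import javax.persistence.Column;
    import javax.persistence.Entity;
    import javax.persistence.GeneratedValue;
    import javax.persistence.Id;
    import javax.persistence.Lob;

    import org.hibernate.annotations.Type;

    // Hypothetical entity used only to illustrate the mapping introduced above.
    @Entity
    public class ExampleNote {

        @Id
        @GeneratedValue
        private Integer id;

        // Hibernate's built-in TextType replaces the removed DatabaseAwareLobType,
        // which is sufficient now that only PostgreSQL and H2 are supported.
        @Lob
        @Type(type = "org.hibernate.type.TextType")
        @Column(name = "note_text")
        private String noteText;
    }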
- */ -public class DatabaseAwareLobType extends AbstractSingleColumnStandardBasicType { - - public static final DatabaseAwareLobType INSTANCE = new DatabaseAwareLobType(); - - public DatabaseAwareLobType() { - super( getDbDescriptor(), StringTypeDescriptor.INSTANCE ); - } - - public static SqlTypeDescriptor getDbDescriptor() { - if ( isPostgres() ) { - return LongVarcharTypeDescriptor.INSTANCE; - } else { - return ClobTypeDescriptor.DEFAULT; - } - } - - private static boolean isPostgres() { - ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - String dbDialect = configurationService.getProperty("db.dialect"); - - return StringUtils.containsIgnoreCase(dbDialect, "PostgreSQL"); - } - - @Override - public String getName() { - return "database_aware_lob"; - } -} - From 66eb8a548fe55698cd53766fb86f605f23534323 Mon Sep 17 00:00:00 2001 From: Marie Verdonck Date: Thu, 4 May 2023 20:11:47 +0200 Subject: [PATCH 156/686] Browse-by support for controlled vocabularies https://github.com/DSpace/RestContract/pull/225 --- .../authority/ChoiceAuthorityServiceImpl.java | 54 +++++++++++++ .../DSpaceControlledVocabularyIndex.java | 45 +++++++++++ .../service/ChoiceAuthorityService.java | 4 + .../DiscoveryConfigurationService.java | 12 +++ .../rest/converter/BrowseIndexConverter.java | 6 +- .../HierarchicalBrowseConverter.java | 42 ++++++++++ .../rest/link/BrowseEntryHalLinkFactory.java | 4 +- .../app/rest/model/BrowseIndexRest.java | 77 +++++++++++++++---- .../model/hateoas/BrowseIndexResource.java | 34 +++++++- .../repository/BrowseEntryLinkRepository.java | 5 +- .../repository/BrowseIndexRestRepository.java | 30 +++++++- .../repository/BrowseItemLinkRepository.java | 5 +- .../repository/VocabularyRestRepository.java | 2 +- 13 files changed, 288 insertions(+), 32 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index f25e2c4646b2..ec8f8769be52 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -15,6 +15,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -30,6 +31,8 @@ import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Utils; import org.dspace.core.service.PluginService; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; @@ -80,6 +83,9 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected Map>> authoritiesFormDefinitions = new HashMap>>(); + // Map of vocabulary authorities to and their index info equivalent + protected Map vocabularyIndexMap = new HashMap<>(); + // the item submission reader private SubmissionConfigReader itemSubmissionConfigReader; @@ -87,6 +93,8 @@ public final class ChoiceAuthorityServiceImpl 
implements ChoiceAuthorityService protected ConfigurationService configurationService; @Autowired(required = true) protected PluginService pluginService; + @Autowired + private DiscoveryConfigurationService searchConfigurationService; final static String CHOICES_PLUGIN_PREFIX = "choices.plugin."; final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; @@ -540,4 +548,50 @@ public Choice getParentChoice(String authorityName, String vocabularyId, String HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName); return ma.getParentChoice(authorityName, vocabularyId, locale); } + + @Override + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) { + if (this.vocabularyIndexMap.containsKey(nameVocab)) { + return this.vocabularyIndexMap.get(nameVocab); + } else { + init(); + ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab); + if (source != null && source instanceof DSpaceControlledVocabulary) { + Set metadataFields = new HashSet<>(); + Map> formsToFields = this.authoritiesFormDefinitions.get(nameVocab); + for (Map.Entry> formToField : formsToFields.entrySet()) { + metadataFields.addAll(formToField.getValue().stream().map(value -> + StringUtils.replace(value, "_", ".")) + .collect(Collectors.toList())); + } + DiscoverySearchFilterFacet matchingFacet = null; + for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) { + boolean coversAllFieldsFromVocab = true; + for (String fieldFromVocab: metadataFields) { + boolean coversFieldFromVocab = false; + for (String facetMdField: facetConfig.getMetadataFields()) { + if (facetMdField.startsWith(fieldFromVocab)) { + coversFieldFromVocab = true; + break; + } + } + if (!coversFieldFromVocab) { + coversAllFieldsFromVocab = false; + break; + } + } + if (coversAllFieldsFromVocab) { + matchingFacet = facetConfig; + break; + } + } + DSpaceControlledVocabularyIndex vocabularyIndex = + new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields, + matchingFacet); + this.vocabularyIndexMap.put(nameVocab, vocabularyIndex); + return vocabularyIndex; + } + return null; + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java new file mode 100644 index 000000000000..6f350fc71ec9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.util.Set; + +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; + +/** + * Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a + * {@code BrowseIndexRest} + * cached by {@link org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)} + * + * @author Marie Verdonck (Atmire) on 04/05/2023 + */ +public class DSpaceControlledVocabularyIndex { + + protected DSpaceControlledVocabulary vocabulary; + protected Set metadataFields; + protected DiscoverySearchFilterFacet facetConfig; + + public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set metadataFields, 
+ DiscoverySearchFilterFacet facetConfig) { + this.vocabulary = controlledVocabulary; + this.metadataFields = metadataFields; + this.facetConfig = facetConfig; + } + + public DSpaceControlledVocabulary getVocabulary() { + return vocabulary; + } + + public Set getMetadataFields() { + return this.metadataFields; + } + + public DiscoverySearchFilterFacet getFacetConfig() { + return this.facetConfig; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java index eb34de29c14b..a9fd24e947b3 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java @@ -15,6 +15,7 @@ import org.dspace.content.authority.Choice; import org.dspace.content.authority.ChoiceAuthority; import org.dspace.content.authority.Choices; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; /** * Broker for ChoiceAuthority plugins, and for other information configured @@ -220,4 +221,7 @@ public Choices getBestMatch(String fieldKey, String query, Collection collection * @return the parent Choice object if any */ public Choice getParentChoice(String authorityName, String vocabularyId, String locale); + + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab); + } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 636e7ccd2ae4..f4fd3ca0ef5c 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -92,6 +92,18 @@ public List getIndexAlwaysConfigurations() { return configs; } + /** + * @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet} + */ + public List getAllFacetsConfig() { + List configs = new ArrayList<>(); + for (String key : map.keySet()) { + DiscoveryConfiguration config = map.get(key); + configs.addAll(config.getSidebarFacets()); + } + return configs; + } + public static void main(String[] args) { System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java index 6ee836e5fc9b..1e2899b396ab 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java @@ -7,6 +7,9 @@ */ package org.dspace.app.rest.converter; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; + import java.util.ArrayList; import java.util.List; @@ -33,14 +36,15 @@ public BrowseIndexRest convert(BrowseIndex obj, Projection projection) { bir.setId(obj.getName()); bir.setDataType(obj.getDataType()); bir.setOrder(obj.getDefaultOrder()); - bir.setMetadataBrowse(obj.isMetadataIndex()); List metadataList = new 
ArrayList(); if (obj.isMetadataIndex()) { for (String s : obj.getMetadata().split(",")) { metadataList.add(s.trim()); } + bir.setBrowseType(BROWSE_TYPE_VALUE_LIST); } else { metadataList.add(obj.getSortOption().getMetadata()); + bir.setBrowseType(BROWSE_TYPE_FLAT); } bir.setMetadataList(metadataList); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java new file mode 100644 index 000000000000..7b0cea9d8fb2 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.converter; + +import java.util.ArrayList; + +import org.dspace.app.rest.model.BrowseIndexRest; +import org.dspace.app.rest.projection.Projection; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; +import org.springframework.stereotype.Component; + +/** + * This is the converter from a {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} to a + * {@link org.dspace.app.rest.model.BrowseIndexRest#BROWSE_TYPE_HIERARCHICAL} {@link org.dspace.app.rest.model.BrowseIndexRest} + * + * @author Marie Verdonck (Atmire) on 04/05/2023 + */ +@Component +public class HierarchicalBrowseConverter implements DSpaceConverter { + + @Override + public BrowseIndexRest convert(DSpaceControlledVocabularyIndex obj, Projection projection) { + BrowseIndexRest bir = new BrowseIndexRest(); + bir.setProjection(projection); + bir.setId(obj.getVocabulary().getPluginInstanceName()); + bir.setBrowseType(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL); + bir.setFacetType(obj.getFacetConfig().getIndexFieldName()); + bir.setVocabulary(obj.getVocabulary().getPluginInstanceName()); + bir.setMetadataList(new ArrayList<>(obj.getMetadataFields())); + return bir; + } + + @Override + public Class getModelClass() { + return DSpaceControlledVocabularyIndex.class; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java index ee70dbf43132..9e515984fe03 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java @@ -37,11 +37,11 @@ protected void addLinks(final BrowseEntryResource halResource, final Pageable pa UriComponentsBuilder baseLink = uriBuilder( getMethodOn(bix.getCategory(), bix.getType()).findRel(null, null, bix.getCategory(), English.plural(bix.getType()), bix.getId(), - BrowseIndexRest.ITEMS, null, null)); + BrowseIndexRest.LINK_ITEMS, null, null)); addFilterParams(baseLink, data); - list.add(buildLink(BrowseIndexRest.ITEMS, + list.add(buildLink(BrowseIndexRest.LINK_ITEMS, baseLink.build().encode().toUriString())); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BrowseIndexRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BrowseIndexRest.java index 9fee6cbdbad2..f7978f00fdf5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BrowseIndexRest.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BrowseIndexRest.java @@ -10,6 +10,7 @@ import java.util.List; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import org.dspace.app.rest.RestResourceController; @@ -20,11 +21,11 @@ */ @LinksRest(links = { @LinkRest( - name = BrowseIndexRest.ITEMS, + name = BrowseIndexRest.LINK_ITEMS, method = "listBrowseItems" ), @LinkRest( - name = BrowseIndexRest.ENTRIES, + name = BrowseIndexRest.LINK_ENTRIES, method = "listBrowseEntries" ) }) @@ -35,20 +36,38 @@ public class BrowseIndexRest extends BaseObjectRest { public static final String CATEGORY = RestAddressableModel.DISCOVER; - public static final String ITEMS = "items"; - public static final String ENTRIES = "entries"; - - boolean metadataBrowse; - + public static final String LINK_ITEMS = "items"; + public static final String LINK_ENTRIES = "entries"; + public static final String LINK_VOCABULARY = "vocabulary"; + + // if the browse index has two levels, the 1st level shows the list of entries like author names, subjects, types, + // etc. the second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_VALUE_LIST = "valueList"; + // if the browse index has one level: the full list of items + public static final String BROWSE_TYPE_FLAT = "flatBrowse"; + // if the browse index should display the vocabulary tree. The 1st level shows the tree. + // The second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_HIERARCHICAL = "hierarchicalBrowse"; + + // Shared fields + String browseType; @JsonProperty(value = "metadata") List metadataList; + // Single browse index fields + @JsonInclude(JsonInclude.Include.NON_NULL) String dataType; - + @JsonInclude(JsonInclude.Include.NON_NULL) List sortOptions; - + @JsonInclude(JsonInclude.Include.NON_NULL) String order; + // Hierarchical browse fields + @JsonInclude(JsonInclude.Include.NON_NULL) + String facetType; + @JsonInclude(JsonInclude.Include.NON_NULL) + String vocabulary; + @JsonIgnore @Override public String getCategory() { @@ -60,14 +79,6 @@ public String getType() { return NAME; } - public boolean isMetadataBrowse() { - return metadataBrowse; - } - - public void setMetadataBrowse(boolean metadataBrowse) { - this.metadataBrowse = metadataBrowse; - } - public List getMetadataList() { return metadataList; } @@ -100,6 +111,38 @@ public void setSortOptions(List sortOptions) { this.sortOptions = sortOptions; } + /** + * - valueList => if the browse index has two levels, the 1st level shows the list of entries like author names, + * subjects, types, etc. the second level is the actual list of items linked to a specific entry + * - flatBrowse if the browse index has one level: the full list of items + * - hierarchicalBrowse if the browse index should display the vocabulary tree. The 1st level shows the tree. 
+ * The second level is the actual list of items linked to a specific entry + */ + public void setBrowseType(String browseType) { + this.browseType = browseType; + } + + public String getBrowseType() { + return browseType; + } + + public void setFacetType(String facetType) { + this.facetType = facetType; + } + + public String getFacetType() { + return facetType; + } + + public void setVocabulary(String vocabulary) { + this.vocabulary = vocabulary; + } + + + public String getVocabulary() { + return vocabulary; + } + @Override public Class getController() { return RestResourceController.class; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java index f6c821595f55..61158704ea5a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java @@ -7,9 +7,20 @@ */ package org.dspace.app.rest.model.hateoas; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + +import org.atteo.evo.inflector.English; +import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.model.BrowseIndexRest; +import org.dspace.app.rest.model.VocabularyRest; import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; import org.dspace.app.rest.utils.Utils; +import org.dspace.content.authority.ChoiceAuthority; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.springframework.hateoas.Link; +import org.springframework.web.util.UriComponentsBuilder; /** * Browse Index Rest HAL Resource. 
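To make the three browse types concrete, the hierarchical case can be resolved roughly as sketched below, using the collaborators available inside BrowseIndexRestRepository (choiceAuthorityService, converter, utils). The name "srsc" is only an example vocabulary name and is not something this patch configures:

    // Resolution order introduced in BrowseIndexRestRepository.findOne(): a name is
    // first looked up as a classic BrowseIndex and, failing that, as a controlled
    // vocabulary wired up through choices.plugin.* and the submission forms.
    DSpaceControlledVocabularyIndex vocabIndex = choiceAuthorityService.getVocabularyIndex("srsc");
    if (vocabIndex != null) {
        BrowseIndexRest rest = converter.toRest(vocabIndex, utils.obtainProjection());
        // rest.getBrowseType() -> BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL
        // rest.getVocabulary() -> "srsc"
        // rest.getFacetType()  -> index field name of the Discovery facet that covers
        //                         the vocabulary's metadata fields
    }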
The HAL Resource wraps the REST Resource @@ -19,15 +30,32 @@ */ @RelNameDSpaceResource(BrowseIndexRest.NAME) public class BrowseIndexResource extends DSpaceResource { + + public BrowseIndexResource(BrowseIndexRest bix, Utils utils) { super(bix, utils); // TODO: the following code will force the embedding of items and // entries in the browseIndex we need to find a way to populate the rels // array from the request/projection right now it is always null // super(bix, utils, "items", "entries"); - if (bix.isMetadataBrowse()) { - add(utils.linkToSubResource(bix, BrowseIndexRest.ENTRIES)); + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ENTRIES)); + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL)) { + ChoiceAuthorityService choiceAuthorityService = + ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService(); + ChoiceAuthority source = choiceAuthorityService.getChoiceAuthorityByAuthorityName(bix.getVocabulary()); + UriComponentsBuilder baseLink = linkTo( + methodOn(RestResourceController.class, VocabularyRest.AUTHENTICATION).findRel(null, + null, VocabularyRest.CATEGORY, + English.plural(VocabularyRest.NAME), source.getPluginInstanceName(), + "", null, null)).toUriComponentsBuilder(); + + add(Link.of(baseLink.build().encode().toUriString(), BrowseIndexRest.LINK_VOCABULARY)); } - add(utils.linkToSubResource(bix, BrowseIndexRest.ITEMS)); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java index 93224f78cd53..f608595c3dda 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java @@ -40,7 +40,7 @@ * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ENTRIES) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." 
+ BrowseIndexRest.LINK_ENTRIES) public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -127,7 +127,8 @@ public Page listBrowseEntries(HttpServletRequest request, Strin @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (bir.isMetadataBrowse() && "entries".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST) && + name.equals(BrowseIndexRest.LINK_ENTRIES)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java index 01277ff29b19..c87cbc6c037e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java @@ -13,7 +13,10 @@ import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseIndex; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; +import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.security.access.prepost.PreAuthorize; @@ -27,20 +30,39 @@ @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME) public class BrowseIndexRestRepository extends DSpaceRestRepository { + @Autowired + private ChoiceAuthorityService choiceAuthorityService; + @Override @PreAuthorize("permitAll()") public BrowseIndexRest findOne(Context context, String name) { - BrowseIndexRest bi = null; + BrowseIndexRest bi = createFromMatchingBrowseIndex(name); + if (bi == null) { + bi = createFromMatchingVocabulary(name); + } + + return bi; + } + + private BrowseIndexRest createFromMatchingVocabulary(String name) { + DSpaceControlledVocabularyIndex vocabularyIndex = choiceAuthorityService.getVocabularyIndex(name); + if (vocabularyIndex != null) { + return converter.toRest(vocabularyIndex, utils.obtainProjection()); + } + return null; + } + + private BrowseIndexRest createFromMatchingBrowseIndex(String name) { BrowseIndex bix; try { - bix = BrowseIndex.getBrowseIndex(name); + bix = BrowseIndex.getBrowseIndex(name); } catch (BrowseException e) { throw new RuntimeException(e.getMessage(), e); } if (bix != null) { - bi = converter.toRest(bix, utils.obtainProjection()); + return converter.toRest(bix, utils.obtainProjection()); } - return bi; + return null; } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java index 74aa9f38bfec..baa79bc80ae7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java @@ -42,7 +42,7 @@ * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ITEMS) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." 
+ BrowseIndexRest.LINK_ITEMS) public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -155,7 +155,8 @@ public Page listBrowseItems(HttpServletRequest request, String browseN @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (!bir.isMetadataBrowse() && "items".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT) && + name.equals(BrowseIndexRest.LINK_ITEMS)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/VocabularyRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/VocabularyRestRepository.java index dcdf71186bcb..fcc37d13160d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/VocabularyRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/VocabularyRestRepository.java @@ -53,7 +53,7 @@ public class VocabularyRestRepository extends DSpaceRestRepository Date: Fri, 5 May 2023 12:35:40 +0200 Subject: [PATCH 157/686] 94299: Remove bitstreams in bulk via patch --- .../src/main/resources/Messages.properties | 2 + .../rest/BitstreamCategoryRestController.java | 63 ++++++++++++++++++ .../DSpaceApiExceptionControllerAdvice.java | 1 + .../RESTBitstreamNotFoundException.java | 51 +++++++++++++++ .../repository/BitstreamRestRepository.java | 19 ++++++ .../operation/BitstreamRemoveOperation.java | 65 +++++++++++++++++++ 6 files changed, 201 insertions(+) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index b537819c06a6..78e2774013a9 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -120,3 +120,5 @@ org.dspace.app.rest.exception.RESTEmptyWorkflowGroupException.message = Refused org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eperson.firstname and eperson.lastname values need to be filled in org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks +org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \ + the repository diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java new file mode 100644 index 000000000000..13929e5a9a73 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java @@ -0,0 +1,63 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static 
org.dspace.app.rest.utils.ContextUtil.obtainContext; + +import java.sql.SQLException; +import javax.servlet.http.HttpServletRequest; + +import com.fasterxml.jackson.databind.JsonNode; +import org.dspace.app.rest.model.BitstreamRest; +import org.dspace.app.rest.repository.BitstreamRestRepository; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * REST controller for handling bulk updates to Bitstream resources. + *

+ * This controller is responsible for handling requests to the bitstream category, which allows for updating + * multiple bitstream resources in a single operation. + *

+ * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@RestController +@RequestMapping("/api/" + BitstreamRest.CATEGORY + "/" + BitstreamRest.PLURAL_NAME) +public class BitstreamCategoryRestController { + @Autowired + BitstreamRestRepository bitstreamRestRepository; + + /** + * Handles PATCH requests to the bitstream category for bulk updates of bitstream resources. + * + * @param request the HTTP request object. + * @param jsonNode the JSON representation of the bulk update operation, containing the updates to be applied. + * @return a ResponseEntity representing the HTTP response to be sent back to the client, in this case, a + * HTTP 204 No Content response since currently only a delete operation is supported. + * @throws SQLException if an error occurs while accessing the database. + * @throws AuthorizeException if the user is not authorized to perform the requested operation. + */ + @PreAuthorize("hasAuthority('ADMIN')") + @RequestMapping(method = RequestMethod.PATCH) + public ResponseEntity> patch(HttpServletRequest request, + @RequestBody(required = true) JsonNode jsonNode) + throws SQLException, AuthorizeException { + Context context = obtainContext(request); + bitstreamRestRepository.patchBitstreamsInBulk(context, jsonNode); + return ResponseEntity.noContent().build(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java index 6ded477813f1..3f55536666a0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java @@ -163,6 +163,7 @@ protected void handleOrcidValidationException(HttpServletRequest request, HttpSe EPersonNameNotProvidedException.class, GroupNameNotProvidedException.class, GroupHasPendingWorkflowTasksException.class, + RESTBitstreamNotFoundException.class }) protected void handleCustomUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response, TranslatableException ex) throws IOException { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java new file mode 100644 index 000000000000..a0b48e3c0dfc --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.exception; + +import java.text.MessageFormat; + +import org.dspace.core.Context; +import org.dspace.core.I18nUtil; + +/** + *

Extend {@link UnprocessableEntityException} to provide a specific error message + * in the REST response. The error message is added to the response in + * {@link DSpaceApiExceptionControllerAdvice#handleCustomUnprocessableEntityException}, + * hence it should not contain sensitive or security-compromising info.

+ * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +public class RESTBitstreamNotFoundException extends UnprocessableEntityException implements TranslatableException { + + public static String uuid; + + /** + * @param formatStr string with placeholders, ideally obtained using {@link I18nUtil} + * @return message with bitstream id substituted + */ + private static String formatMessage(String formatStr) { + MessageFormat fmt = new MessageFormat(formatStr); + return fmt.format(new String[]{uuid}); + } + + public static final String MESSAGE_KEY = "org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message"; + + public RESTBitstreamNotFoundException(String uuid) { + super(formatMessage(I18nUtil.getMessage(MESSAGE_KEY))); + RESTBitstreamNotFoundException.uuid = uuid; + } + + public String getMessageKey() { + return MESSAGE_KEY; + } + + public String getLocalizedMessage(Context context) { + return formatMessage(I18nUtil.getMessage(MESSAGE_KEY, context)); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index 3696b3866808..8ef06ecbadc2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -17,9 +17,12 @@ import java.util.UUID; import javax.servlet.http.HttpServletRequest; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.Parameter; import org.dspace.app.rest.SearchRestMethod; +import org.dspace.app.rest.converter.JsonPatchConverter; import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.UnprocessableEntityException; @@ -292,4 +295,20 @@ public BundleRest performBitstreamMove(Context context, Bitstream bitstream, Bun return converter.toRest(targetBundle, utils.obtainProjection()); } + + /** + * Method that will transform the provided PATCH json body into a list of operations. + * The operations will be handled by a supporting class resolved by the + * {@link org.dspace.app.rest.repository.patch.ResourcePatch#patch} method. 
+ * + * @param context The context + * @param jsonNode the json body provided from the request body + */ + public void patchBitstreamsInBulk(Context context, JsonNode jsonNode) throws SQLException { + ObjectMapper mapper = new ObjectMapper(); + JsonPatchConverter patchConverter = new JsonPatchConverter(mapper); + Patch patch = patchConverter.convert(jsonNode); + resourcePatch.patch(obtainContext(), null, patch.getOperations()); + context.commit(); + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java new file mode 100644 index 000000000000..5d37e04cea6c --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository.patch.operation; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.UUID; + +import org.dspace.app.rest.exception.RESTBitstreamNotFoundException; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Bitstream; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * A PATCH operation for removing bitstreams in bulk from the repository. + * + * Example: + * curl -X PATCH http://${dspace.server.url}/api/core/bitstreams -H "Content-Type: application/json" + * -d '[ + * {"op": "remove", "path": "/bitstreams/${bitstream1UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream2UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream3UUID}"} + * ]' + * + * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@Component +public class BitstreamRemoveOperation extends PatchOperation { + @Autowired + BitstreamService bitstreamService; + private static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; + + @Override + public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException { + String bitstreamIDtoDelete = operation.getPath().replace(OPERATION_PATH_BITSTREAM_REMOVE, ""); + Bitstream bitstreamToDelete = bitstreamService.find(context, UUID.fromString(bitstreamIDtoDelete)); + if (bitstreamToDelete == null) { + throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete); + } + + try { + bitstreamService.delete(context, bitstreamToDelete); + bitstreamService.update(context, bitstreamToDelete); + } catch (AuthorizeException | IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + return null; + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return objectToMatch == null && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) && + operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE); + } +} From 09b56c2d99b770d376a3e49cb7f01b3ca0a4f5eb Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Fri, 5 May 2023 13:05:34 +0200 Subject: [PATCH 158/686] 94299: Configurable limit on amount of patch operations --- .../app/rest/repository/BitstreamRestRepository.java | 9 
+++++++++ 1 file changed, 9 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index 8ef06ecbadc2..586525bbd2fe 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -43,6 +43,7 @@ import org.dspace.content.service.CommunityService; import org.dspace.core.Context; import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -77,6 +78,9 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository operationsLimit) { + throw new DSpaceBadRequestException("The number of operations in the patch is over the limit of " + + operationsLimit); + } resourcePatch.patch(obtainContext(), null, patch.getOperations()); context.commit(); } From 80706592aae50681dde850391770e6fe09ee6eca Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Fri, 5 May 2023 13:07:47 +0200 Subject: [PATCH 159/686] Revert "94299 Multiple Bitstream deletion endpoint" This reverts commit 51d8874a --- .../app/rest/RestResourceController.java | 33 - .../repository/BitstreamRestRepository.java | 44 - .../rest/repository/DSpaceRestRepository.java | 18 - .../app/rest/BitstreamRestRepositoryIT.java | 955 ------------------ 4 files changed, 1050 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java index 24468660f016..b82b4830753c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest; -import static org.dspace.app.rest.utils.ContextUtil.obtainContext; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_HEX32; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_STRING_VERSION_STRONG; @@ -56,8 +55,6 @@ import org.dspace.app.rest.utils.RestRepositoryUtils; import org.dspace.app.rest.utils.Utils; import org.dspace.authorize.AuthorizeException; -import org.dspace.content.DSpaceObject; -import org.dspace.core.Context; import org.dspace.util.UUIDUtils; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -1053,13 +1050,6 @@ public ResponseEntity> delete(HttpServletRequest request, return deleteInternal(apiCategory, model, uuid); } - @RequestMapping(method = RequestMethod.DELETE, consumes = {"text/uri-list"}) - public ResponseEntity> delete(HttpServletRequest request, @PathVariable String apiCategory, - @PathVariable String model) - throws HttpRequestMethodNotSupportedException { - return deleteUriListInternal(request, apiCategory, model); - } - /** * Internal method to delete resource. 
* @@ -1077,29 +1067,6 @@ private ResponseEntity> deleteI return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); } - public ResponseEntity> deleteUriListInternal( - HttpServletRequest request, - String apiCategory, - String model) - throws HttpRequestMethodNotSupportedException { - checkModelPluralForm(apiCategory, model); - DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); - Context context = obtainContext(request); - List dsoStringList = utils.getStringListFromRequest(request); - List dsoList = utils.constructDSpaceObjectList(context, dsoStringList); - if (dsoStringList.size() != dsoList.size()) { - throw new ResourceNotFoundException("One or more bitstreams could not be found."); - } - try { - repository.delete(dsoList); - } catch (ClassCastException e) { - log.error("Something went wrong whilst creating the object for apiCategory: " + apiCategory + - " and model: " + model, e); - return ControllerUtils.toEmptyResponse(HttpStatus.INTERNAL_SERVER_ERROR); - } - return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); - } - /** * Execute a PUT request for an entity with id of type UUID; * diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index 586525bbd2fe..454b6f8453d6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -10,8 +10,6 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -156,48 +154,6 @@ protected void delete(Context context, UUID id) throws AuthorizeException { } } - @Override - protected void deleteList(Context context, List dsoList) - throws SQLException, AuthorizeException { - // check if list is empty - if (dsoList.isEmpty()) { - throw new ResourceNotFoundException("No bitstreams given."); - } - // check if every DSO is a Bitstream - if (dsoList.stream().anyMatch(dso -> !(dso instanceof Bitstream))) { - throw new UnprocessableEntityException("Not all given items are bitstreams."); - } - // check that they're all part of the same Item - List parents = new ArrayList<>(); - for (DSpaceObject dso : dsoList) { - Bitstream bit = bs.find(context, dso.getID()); - DSpaceObject bitstreamParent = bs.getParentObject(context, bit); - if (bit == null) { - throw new ResourceNotFoundException("The bitstream with uuid " + dso.getID() + " could not be found"); - } - // we have to check if the bitstream has already been deleted - if (bit.isDeleted()) { - throw new UnprocessableEntityException("The bitstream with uuid " + bit.getID() - + " was already deleted"); - } else { - parents.add(bitstreamParent); - } - } - if (parents.stream().distinct().count() > 1) { - throw new UnprocessableEntityException("Not all given items are part of the same Item."); - } - // delete all Bitstreams - Iterator iterator = dsoList.iterator(); - while (iterator.hasNext()) { - Bitstream bit = (Bitstream) iterator.next(); - try { - bs.delete(context, bit); - } catch (SQLException | IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - } - /** * Find the bitstream for the provided handle and sequence or filename. 
* When a bitstream can be found with the sequence ID it will be returned if the user has "METADATA_READ" access. diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java index 219b7c4123b2..01f127eca5ac 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java @@ -26,7 +26,6 @@ import org.dspace.app.rest.model.RestAddressableModel; import org.dspace.app.rest.model.patch.Patch; import org.dspace.authorize.AuthorizeException; -import org.dspace.content.DSpaceObject; import org.dspace.content.service.MetadataFieldService; import org.dspace.core.Context; import org.springframework.beans.factory.BeanNameAware; @@ -257,23 +256,6 @@ public void deleteAll() { } - public void delete(List dsoList) { - Context context = obtainContext(); - try { - getThisRepository().deleteList(context, dsoList); - context.commit(); - } catch (AuthorizeException e) { - throw new RESTAuthorizationException(e); - } catch (SQLException ex) { - throw new RuntimeException(ex.getMessage(), ex); - } - } - - protected void deleteList(Context context, List list) - throws AuthorizeException, SQLException, RepositoryMethodNotImplementedException { - throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); - } - @Override /** * This method cannot be implemented we required all the find method to be paginated diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index 391d9e419330..f9c1e469fcfe 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -13,7 +13,6 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; @@ -1202,960 +1201,6 @@ public void deleteDeleted() throws Exception { .andExpect(status().isNotFound()); } - @Test - public void deleteListOneBitstream() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. 
One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - String bitstreamContent = "ThisIsSomeDummyText"; - //Add a bitstream to an item - Bitstream bitstream = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - bitstream = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream") - .withDescription("Description") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) - .andExpect(status().is(204)); - - // Verify 404 after delete - getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) - .andExpect(status().isNotFound()); - } - - @Test - public void deleteListOneOfMultipleBitstreams() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete bitstream1 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().is(204)); - - // Verify 404 after delete for bitstream1 - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isNotFound()); - - // check that bitstream2 still exists - getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds())); - - // check that bitstream3 still exists - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds())) - ; - } - - @Test - public void deleteListAllBitstreams() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete all bitstreams - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().is(204)); - - // Verify 404 after delete for bitstream1 - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isNotFound()); - - // Verify 404 after delete for bitstream2 - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isNotFound()); - - // Verify 404 after delete for bitstream3 - getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isNotFound()); - } - - @Test - public void deleteListForbidden() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(eperson.getEmail(), password); - - // Delete using an unauthorized user - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isForbidden()); - - // Verify the bitstreams are still here - getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListUnauthorized() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - // Delete as anonymous - getClient().perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isUnauthorized()); - - // Verify the bitstreams are still here - getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListEmpty() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete with empty list throws 404 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("")) - .andExpect(status().isNotFound()); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListNotBitstream() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete with list containing non-Bitstream throws 422 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID() - + " \n http://localhost:8080/server/api/core/items/" + publicItem1.getID())) - .andExpect(status().is(422)); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListDifferentItems() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. Two public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - Item publicItem2 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 1 bitstream to each item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. 
- createBitstream(context, publicItem2, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete with list containing Bitstreams from different items throws 422 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().is(422)); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - } - - @Test - public void deleteListLogo() throws Exception { - // We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - // ** GIVEN ** - // 1. A community with a logo - parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").withLogo("logo_community") - .build(); - - // 2. A collection with a logo - Collection col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection") - .withLogo("logo_collection").build(); - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // trying to DELETE parentCommunity logo and collection logo should work - // we have to delete them separately otherwise it will throw 422 as they belong to different items - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + parentCommunity.getLogo().getID())) - .andExpect(status().is(204)); - - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + col.getLogo().getID())) - .andExpect(status().is(204)); - - // Verify 404 after delete for parentCommunity logo - getClient(token).perform(get("/api/core/bitstreams/" + parentCommunity.getLogo().getID())) - .andExpect(status().isNotFound()); - - // Verify 404 after delete for collection logo - getClient(token).perform(get("/api/core/bitstreams/" + col.getLogo().getID())) - .andExpect(status().isNotFound()); - } - - @Test - public void deleteListMissing() throws Exception { - String token = getAuthToken(admin.getEmail(), password); - - // Delete - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) - .andExpect(status().isNotFound()); - - // Verify 404 after failed delete - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) - .andExpect(status().isNotFound()); - } - - @Test - public void deleteListOneMissing() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. 
- parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete all bitstreams and a missing bitstream returns 404 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) - .andExpect(status().isNotFound()); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListOneMissingDifferentItems() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. 
Two public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - Item publicItem2 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 1 bitstream to each item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem2, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete all bitstreams and a missing bitstream returns 404 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) - .andExpect(status().isNotFound()); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - } - - @Test - public void deleteListDeleted() throws Exception { - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - String bitstreamContent = "ThisIsSomeDummyText"; - //Add a bitstream to an item - Bitstream bitstream = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - bitstream = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream") - .withDescription("Description") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) - .andExpect(status().is(204)); - - // Verify 404 when trying to delete a non-existing, already deleted, bitstream - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) - .andExpect(status().is(422)); - } - - @Test - public void deleteListOneDeleted() throws Exception { - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete bitstream1 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().is(204)); - - // Verify 404 when trying to delete a non-existing, already deleted, bitstream - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().is(422)); - } - @Test public void patchBitstreamMetadataAuthorized() throws Exception { runPatchMetadataTests(admin, 200); From 648b27befbe992515319a79d78d01b8327c338d4 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Fri, 5 May 2023 14:34:53 +0200 Subject: [PATCH 160/686] 101549: Make BrowseIndexRestRepository#findAll also return hierarchicalBrowses --- .../java/org/dspace/browse/BrowseIndex.java | 8 ++-- .../DSpaceControlledVocabularyIndex.java | 4 +- .../rest/converter/BrowseIndexConverter.java | 24 ++++++++--- .../HierarchicalBrowseConverter.java | 42 ------------------- .../repository/BrowseIndexRestRepository.java | 6 ++- 5 files changed, 32 insertions(+), 52 deletions(-) delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java index 8d065c21ce36..6c38c8dd664b 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java @@ -22,11 +22,13 @@ * This class holds all the information about a specifically configured * BrowseIndex. It is responsible for parsing the configuration, understanding * about what sort options are available, and what the names of the database - * tables that hold all the information are actually called. + * tables that hold all the information are actually called. 
Hierarchical browse + * indexes also contain information about the vocabulary they're using, see: + * {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} * * @author Richard Jones */ -public final class BrowseIndex { +public class BrowseIndex { /** the configuration number, as specified in the config */ /** * used for single metadata browse tables for generating the table name @@ -102,7 +104,7 @@ private BrowseIndex() { * * @param baseName The base of the table name */ - private BrowseIndex(String baseName) { + protected BrowseIndex(String baseName) { try { number = -1; tableBaseName = baseName; diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java index 6f350fc71ec9..bf8194dbd53b 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java @@ -9,6 +9,7 @@ import java.util.Set; +import org.dspace.browse.BrowseIndex; import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; /** @@ -18,7 +19,7 @@ * * @author Marie Verdonck (Atmire) on 04/05/2023 */ -public class DSpaceControlledVocabularyIndex { +public class DSpaceControlledVocabularyIndex extends BrowseIndex { protected DSpaceControlledVocabulary vocabulary; protected Set metadataFields; @@ -26,6 +27,7 @@ public class DSpaceControlledVocabularyIndex { public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set metadataFields, DiscoverySearchFilterFacet facetConfig) { + super(controlledVocabulary.vocabularyName); this.vocabulary = controlledVocabulary; this.metadataFields = metadataFields; this.facetConfig = facetConfig; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java index 1e2899b396ab..2595968d4d02 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.converter; import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL; import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import java.util.ArrayList; @@ -16,6 +17,7 @@ import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.projection.Projection; import org.dspace.browse.BrowseIndex; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; import org.dspace.sort.SortException; import org.dspace.sort.SortOption; import org.springframework.stereotype.Component; @@ -33,19 +35,29 @@ public class BrowseIndexConverter implements DSpaceConverter metadataList = new ArrayList(); - if (obj.isMetadataIndex()) { + String id = obj.getName(); + if (obj instanceof DSpaceControlledVocabularyIndex) { + DSpaceControlledVocabularyIndex vocObj = (DSpaceControlledVocabularyIndex) obj; + metadataList = new ArrayList<>(vocObj.getMetadataFields()); + id = vocObj.getVocabulary().getPluginInstanceName(); + bir.setFacetType(vocObj.getFacetConfig().getIndexFieldName()); + bir.setVocabulary(vocObj.getVocabulary().getPluginInstanceName()); + bir.setBrowseType(BROWSE_TYPE_HIERARCHICAL); + } 
else if (obj.isMetadataIndex()) { for (String s : obj.getMetadata().split(",")) { metadataList.add(s.trim()); } + bir.setDataType(obj.getDataType()); + bir.setOrder(obj.getDefaultOrder()); bir.setBrowseType(BROWSE_TYPE_VALUE_LIST); } else { metadataList.add(obj.getSortOption().getMetadata()); + bir.setDataType(obj.getDataType()); + bir.setOrder(obj.getDefaultOrder()); bir.setBrowseType(BROWSE_TYPE_FLAT); } + bir.setId(id); bir.setMetadataList(metadataList); List sortOptionsList = new ArrayList(); @@ -56,7 +68,9 @@ public BrowseIndexRest convert(BrowseIndex obj, Projection projection) { } catch (SortException e) { throw new RuntimeException(e.getMessage(), e); } - bir.setSortOptions(sortOptionsList); + if (!bir.getBrowseType().equals(BROWSE_TYPE_HIERARCHICAL)) { + bir.setSortOptions(sortOptionsList); + } return bir; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java deleted file mode 100644 index 7b0cea9d8fb2..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java +++ /dev/null @@ -1,42 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.converter; - -import java.util.ArrayList; - -import org.dspace.app.rest.model.BrowseIndexRest; -import org.dspace.app.rest.projection.Projection; -import org.dspace.content.authority.DSpaceControlledVocabularyIndex; -import org.springframework.stereotype.Component; - -/** - * This is the converter from a {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} to a - * {@link org.dspace.app.rest.model.BrowseIndexRest#BROWSE_TYPE_HIERARCHICAL} {@link org.dspace.app.rest.model.BrowseIndexRest} - * - * @author Marie Verdonck (Atmire) on 04/05/2023 - */ -@Component -public class HierarchicalBrowseConverter implements DSpaceConverter { - - @Override - public BrowseIndexRest convert(DSpaceControlledVocabularyIndex obj, Projection projection) { - BrowseIndexRest bir = new BrowseIndexRest(); - bir.setProjection(projection); - bir.setId(obj.getVocabulary().getPluginInstanceName()); - bir.setBrowseType(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL); - bir.setFacetType(obj.getFacetConfig().getIndexFieldName()); - bir.setVocabulary(obj.getVocabulary().getPluginInstanceName()); - bir.setMetadataList(new ArrayList<>(obj.getMetadataFields())); - return bir; - } - - @Override - public Class getModelClass() { - return DSpaceControlledVocabularyIndex.class; - } -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java index c87cbc6c037e..b166bffda785 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java @@ -7,6 +7,7 @@ */ package org.dspace.app.rest.repository; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -68,7 +69,10 @@ private BrowseIndexRest createFromMatchingBrowseIndex(String name) { @Override public Page findAll(Context context, Pageable pageable) { try { - List indexes = 
Arrays.asList(BrowseIndex.getBrowseIndices());
+            List indexes = new ArrayList<>(Arrays.asList(BrowseIndex.getBrowseIndices()));
+            choiceAuthorityService.getChoiceAuthoritiesNames()
+                .stream().filter(name -> choiceAuthorityService.getVocabularyIndex(name) != null)
+                .forEach(name -> indexes.add(choiceAuthorityService.getVocabularyIndex(name)));
             return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection());
         } catch (BrowseException e) {
             throw new RuntimeException(e.getMessage(), e);

From 999fb46e8dfafe82313d6b19441ec34075a2a4c8 Mon Sep 17 00:00:00 2001
From: Jens Vannerum
Date: Fri, 5 May 2023 15:10:12 +0200
Subject: [PATCH 161/686] 94299: Add IT

---
 .../operation/BitstreamRemoveOperation.java | 2 +-
 .../app/rest/BitstreamRestRepositoryIT.java | 229 ++++++++++++++++++
 2 files changed, 230 insertions(+), 1 deletion(-)

diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java
index 5d37e04cea6c..93c495a30233 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java
@@ -38,7 +38,7 @@ public class BitstreamRemoveOperation extends PatchOperation {
     @Autowired
     BitstreamService bitstreamService;
 
-    private static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/";
+    public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/";
 
     @Override
     public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException {
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java
index f9c1e469fcfe..3b01b4eac2ab 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java
@@ -7,22 +7,29 @@
  */
 package org.dspace.app.rest;
 
+import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
+import static javax.servlet.http.HttpServletResponse.SC_OK;
 import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
 import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist;
+import static org.dspace.app.rest.repository.patch.operation.BitstreamRemoveOperation.OPERATION_PATH_BITSTREAM_REMOVE;
 import static org.dspace.core.Constants.WRITE;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertEquals;
 import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
 import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
 import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
 import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
 import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 
 import java.io.InputStream;
+import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.List;
 import java.util.UUID;
+import
javax.ws.rs.core.MediaType; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; @@ -30,6 +37,8 @@ import org.dspace.app.rest.matcher.BitstreamMatcher; import org.dspace.app.rest.matcher.BundleMatcher; import org.dspace.app.rest.matcher.HalMatcher; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.RemoveOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; import org.dspace.authorize.service.ResourcePolicyService; @@ -52,10 +61,13 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.hamcrest.Matchers; +import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.web.servlet.MvcResult; public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest { @@ -2279,6 +2291,223 @@ public void findByHandleAndFileNameForPublicItemWithEmbargoOnFile() throws Excep )); } + @Test + public void deleteBitstreamsInBulk() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Verify that only the three bitstreams were deleted and the fourth one 
still exists + Assert.assertTrue(bitstreamNotFound(token, bitstream1, bitstream2, bitstream3)); + Assert.assertTrue(bitstreamExists(token, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidUUID() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // For the third bitstream, use an invalid UUID + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + UUID randomUUID = UUID.randomUUID(); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + randomUUID); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + MvcResult result = getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnprocessableEntity()) + .andReturn(); + + // Verify our custom error message is returned when an invalid UUID is used + assertEquals("Bitstream with uuid " + randomUUID + " could not be found in the repository", + result.getResponse().getErrorMessage()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + 
String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + // Add three out of four bitstreams to the list of bitstreams to be deleted + // But set the patch.operations.limit property to 2, so that the request is invalid + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("patch.operations.limit", 2); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isBadRequest()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_OK) { + return false; + } + } + return true; + } + + public boolean bitstreamNotFound(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_NOT_FOUND) { + return false; + } + } + return true; + } } From acb700c88774b1aea471b4bf08037a8dcfaa8be5 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Fri, 5 May 2023 15:55:49 +0200 Subject: [PATCH 162/686] 101549: Fix BrowseIndexMatcher and BrowsesResourceControllerIT --- .../app/rest/BrowsesResourceControllerIT.java | 26 +++++++++++--- .../app/rest/matcher/BrowseIndexMatcher.java | 34 ++++++++++++++++--- 2 files changed, 50 insertions(+), 10 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index baf459408dbd..a5f4af102cd0 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -63,22 +63,23 @@ public void findAll() throws Exception { //We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) - //Our default Discovery config has 4 browse indexes so we expect this to be reflected in the page + //Our default Discovery config has 5 browse indexes, so we expect this to be reflected in the page // object .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", is(4))) + .andExpect(jsonPath("$.page.totalElements", is(5))) .andExpect(jsonPath("$.page.totalPages", is(1))) .andExpect(jsonPath("$.page.number", is(0))) - //The array of browse index should have a size 4 - .andExpect(jsonPath("$._embedded.browses", hasSize(4))) + //The array of browse index should have a size 5 + .andExpect(jsonPath("$._embedded.browses", hasSize(5))) //Check that all (and only) the default browse indexes are present .andExpect(jsonPath("$._embedded.browses", containsInAnyOrder( BrowseIndexMatcher.dateIssuedBrowseIndex("asc"), BrowseIndexMatcher.contributorBrowseIndex("asc"), BrowseIndexMatcher.titleBrowseIndex("asc"), - BrowseIndexMatcher.subjectBrowseIndex("asc") + BrowseIndexMatcher.subjectBrowseIndex("asc"), + BrowseIndexMatcher.hierarchicalBrowseIndex("srsc") ))) ; } @@ -125,6 +126,21 @@ public void findBrowseByContributor() throws Exception { ; } + @Test + public void findBrowseByVocabulary() throws Exception { + //Use srsc as this vocabulary is included by default + //When we call the root endpoint + getClient().perform(get("/api/discover/browses/srsc")) + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + + //Check that the JSON root matches the expected browse index + .andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc"))) + ; + } + @Test public void findBrowseBySubject() throws Exception { //When we call the root endpoint diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java index 82d611facf80..80f27b6bbbeb 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java @@ -8,6 +8,9 @@ package org.dspace.app.rest.matcher; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; @@ -16,7 +19,6 @@ import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase; import org.hamcrest.Matcher; -import org.hamcrest.Matchers; /** * Utility class to construct a Matcher for a browse index @@ -31,7 +33,8 @@ private BrowseIndexMatcher() { } public static Matcher subjectBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.subject.*")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", 
equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -44,7 +47,8 @@ public static Matcher subjectBrowseIndex(final String order) { public static Matcher titleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("title")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -56,7 +60,8 @@ public static Matcher titleBrowseIndex(final String order) { public static Matcher contributorBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -69,7 +74,8 @@ public static Matcher contributorBrowseIndex(final String order) public static Matcher dateIssuedBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.date.issued")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("date")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -77,4 +83,22 @@ public static Matcher dateIssuedBrowseIndex(final String order) hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/dateissued/items")) ); } + + public static Matcher hierarchicalBrowseIndex(final String vocabulary) { + return allOf( + hasJsonPath("$.metadata", contains("dc.subject")), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), + hasJsonPath("$.facetType", equalToIgnoringCase("subject")), + hasJsonPath("$.vocabulary", equalToIgnoringCase(vocabulary)), + hasJsonPath("$._links.vocabulary.href", + is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))), + hasJsonPath("$._links.items.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/items", vocabulary))), + hasJsonPath("$._links.entries.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/entries", vocabulary))), + hasJsonPath("$._links.self.href", + is(REST_SERVER_URL + String.format("discover/browses/%s", vocabulary))) + ); + } } From ab240d7f0ec07d9454ae925b2a03154c5cb2b80a Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Fri, 5 May 2023 17:47:24 +0200 Subject: [PATCH 163/686] 101549: Fix BrowsesResourceControllerIT --- .../org/dspace/app/rest/BrowsesResourceControllerIT.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index 301ffeab4177..d1791ab872bb 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -2158,7 +2159,7 @@ public void findOneLinked() throws Exception { // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))) + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))) ; } @@ -2175,7 +2176,7 @@ public void findOneLinkedPassingTwoFields() throws Exception { // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))); + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))); } @Test From 5088447111dd10f8627fe61d5602e6e331a93ff1 Mon Sep 17 00:00:00 2001 From: Michael W Spalti Date: Sat, 6 May 2023 11:30:51 -0700 Subject: [PATCH 164/686] Updated solr query params. --- .../dspace/app/iiif/service/WordHighlightSolrSearch.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java index da50f335825a..9e6022548dbe 100644 --- a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java @@ -118,7 +118,8 @@ private String adjustQuery(String query) { } /** - * Constructs a solr search URL. + * Constructs a solr search URL. Compatible with solr-ocrhighlighting-0.7.2. 
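The hunk that follows replaces the fixed ten-snippet limit with a much larger snippet and passage budget and turns page tracking back on, presumably so that a word-highlight search returns every match in a scanned volume together with the page it falls on, rather than only the first few hits. As a rough, self-contained sketch of the resulting parameter set (standard SolrJ API; the query string here is a placeholder and not taken from this patch):

    import org.apache.solr.client.solrj.SolrQuery;

    public class OcrHighlightQuerySketch {
        public static void main(String[] args) {
            // Placeholder query; the real service builds it from the user's search terms.
            SolrQuery solrQuery = new SolrQuery("placeholder search terms");
            // OCR highlighting parameters as set after this change (solr-ocrhighlighting 0.7.2).
            solrQuery.set("hl.ocr.fl", "ocr_text");             // field holding the OCR payload
            solrQuery.set("hl.ocr.contextBlock", "line");       // a highlight context is one line of text
            solrQuery.set("hl.ocr.contextSize", "2");           // two lines of context around each hit
            solrQuery.set("hl.snippets", "8192");               // raised from 10: effectively all snippets
            solrQuery.set("hl.ocr.maxPassages", "8192");        // effectively all passages
            solrQuery.set("hl.ocr.trackPages", "on");           // include page information with each hit
            solrQuery.set("hl.ocr.limitBlock", "page");         // keep a snippet from crossing a page
            solrQuery.set("hl.ocr.absoluteHighlights", "true");
            System.out.println(solrQuery);
        }
    }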
+ * https://github.com/dbmdz/solr-ocrhighlighting/releases/tag/0.7.2 * * @param query the search terms * @param manifestId the id of the manifest in which to search @@ -132,8 +133,9 @@ private SolrQuery getSolrQuery(String query, String manifestId) { solrQuery.set("hl.ocr.fl", "ocr_text"); solrQuery.set("hl.ocr.contextBlock", "line"); solrQuery.set("hl.ocr.contextSize", "2"); - solrQuery.set("hl.snippets", "10"); - solrQuery.set("hl.ocr.trackPages", "off"); + solrQuery.set("hl.snippets", "8192"); + solrQuery.set("hl.ocr.maxPassages", "8192"); + solrQuery.set("hl.ocr.trackPages", "on"); solrQuery.set("hl.ocr.limitBlock","page"); solrQuery.set("hl.ocr.absoluteHighlights", "true"); From 7971887b9a8603aa0039f8e5f3595520c0e65c3a Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Fri, 21 Apr 2023 06:54:13 +0200 Subject: [PATCH 165/686] DURACOM-136 allow script execution by user other than admins --- .../ProcessCleanerConfiguration.java | 17 -- .../MetadataDeletionScriptConfiguration.java | 17 -- .../MetadataExportScriptConfiguration.java | 17 -- ...tadataExportSearchScriptConfiguration.java | 6 - .../MetadataImportScriptConfiguration.java | 16 -- .../harvest/HarvestScriptConfiguration.java | 14 - .../ItemExportScriptConfiguration.java | 17 -- .../ItemImportScriptConfiguration.java | 16 -- .../MediaFilterScriptConfiguration.java | 19 -- ...rDatabaseResyncCliScriptConfiguration.java | 6 - .../authorize/AuthorizeServiceImpl.java | 14 + .../authorize/service/AuthorizeService.java | 9 + .../org/dspace/content/dao/ProcessDAO.java | 23 ++ .../content/dao/impl/ProcessDAOImpl.java | 28 ++ .../curate/CurationScriptConfiguration.java | 43 ++- .../IndexDiscoveryScriptConfiguration.java | 17 -- .../OrcidBulkPushScriptConfiguration.java | 17 -- .../dspace/scripts/ProcessServiceImpl.java | 10 + .../org/dspace/scripts/ScriptServiceImpl.java | 2 +- .../configuration/ScriptConfiguration.java | 22 +- .../scripts/service/ProcessService.java | 22 ++ ...iledOpenUrlTrackerScriptConfiguration.java | 17 -- ...nFormsMigrationCliScriptConfiguration.java | 17 -- ...sionFormsMigrationScriptConfiguration.java | 36 ++- ...riptionEmailNotificationConfiguration.java | 16 -- .../builder/AbstractDSpaceObjectBuilder.java | 4 +- .../java/org/dspace/builder/ItemBuilder.java | 4 +- .../org/dspace/builder/ProcessBuilder.java | 3 + ...MockDSpaceRunnableScriptConfiguration.java | 17 -- .../app/rest/ScriptProcessesController.java | 15 +- .../repository/ProcessRestRepository.java | 16 ++ .../rest/repository/ScriptRestRepository.java | 30 +- .../app/rest/ProcessRestRepositoryIT.java | 26 ++ .../app/rest/ScriptRestRepositoryIT.java | 136 ++++++++- ...TypeConversionTestScriptConfiguration.java | 5 - .../org/dspace/curate/CurationScriptIT.java | 267 ++++++++++++++++++ ...MockDSpaceRunnableScriptConfiguration.java | 17 -- 37 files changed, 659 insertions(+), 319 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java index 8d189038d9d1..91dcfb5dfec5 100644 --- a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.administer; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import 
org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script. */ public class ProcessCleanerConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java index 9ccd53944a24..fb228e7041b8 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script. */ public class MetadataDeletionScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java index 31556afc8d3d..aa76c09c0a5b 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataExport} script */ public class MetadataExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git 
a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java index 4e350562bc26..4f2a225d3ac6 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java @@ -9,7 +9,6 @@ package org.dspace.app.bulkedit; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -29,11 +28,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableclass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - return true; - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java index 65994040badc..ce2f7fb68af1 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java @@ -8,22 +8,15 @@ package org.dspace.app.bulkedit; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataImport} script */ public class MetadataImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -40,15 +33,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java index 982973e47c50..ff83c3ecb225 100644 --- a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java @@ -7,18 +7,11 @@ */ package org.dspace.app.harvest; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class HarvestScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; private Class dspaceRunnableClass; @@ -32,13 +25,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch 
(SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java index cf70120d27d3..b37df5f5ea59 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java @@ -7,14 +7,9 @@ */ package org.dspace.app.itemexport; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemExport} script @@ -23,9 +18,6 @@ */ public class ItemExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java index a3149040c49b..fd895e2f44d6 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -8,14 +8,10 @@ package org.dspace.app.itemimport; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemImport} script @@ -24,9 +20,6 @@ */ public class ItemImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +32,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java index 26347c56ee96..867e684db86b 100644 --- 
a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java @@ -7,25 +7,16 @@ */ package org.dspace.app.mediafilter; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class MediaFilterScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; @@ -36,16 +27,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java index b238ccf061f3..067c76cce8b3 100644 --- a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java @@ -8,7 +8,6 @@ package org.dspace.app.solrdatabaseresync; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -27,11 +26,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableCl this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - return true; - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 34543c078ae2..bfd933f48265 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -43,6 +43,7 @@ import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; @@ -809,6 +810,19 @@ public boolean isCollectionAdmin(Context context) throws SQLException { return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE); } + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. 
+ * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + * @throws java.sql.SQLException passed through. + */ + @Override + public boolean isItemAdmin(Context context) throws SQLException { + return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE); + } + /** * Checks that the context's current user is a community or collection admin in the site. * diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 36679f94c6a4..86ff23616867 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -532,6 +532,15 @@ void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int */ boolean isCollectionAdmin(Context context) throws SQLException; + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. + * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + */ + boolean isItemAdmin(Context context) throws SQLException; + /** * Checks that the context's current user is a community or collection admin in the site. * diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java index 69bac319c6d6..95ec40c7a542 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java @@ -14,6 +14,7 @@ import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.core.GenericDAO; +import org.dspace.eperson.EPerson; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessQueryParameterContainer; @@ -97,4 +98,26 @@ int countTotalWithParameters(Context context, ProcessQueryParameterContainer pro List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) throws SQLException; + /** + * Returns a list of all Process objects in the database by the given user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Process objects in the Database + * @throws SQLException If something goes wrong + */ + List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException; + + /** + * Count all the processes which is related to the given user. 
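The two data-access methods introduced here, mirrored on ProcessService further on in this patch, are what back the new "own processes" search on the REST side: one pages through the processes a user has started, the other supplies the total count. A minimal usage sketch follows; the helper method, its name, and the console output are illustrative only and assume a ProcessService obtained in the usual way:

    import java.sql.SQLException;
    import java.util.List;
    import org.dspace.core.Context;
    import org.dspace.eperson.EPerson;
    import org.dspace.scripts.Process;
    import org.dspace.scripts.service.ProcessService;

    public class UserProcessesSketch {
        // Hypothetical helper: print every process started by the given user, one page at a time.
        public static void printProcessesOfUser(Context context, ProcessService processService, EPerson user)
            throws SQLException {
            int pageSize = 20;
            int total = processService.countByUser(context, user);
            for (int offset = 0; offset < total; offset += pageSize) {
                List<Process> page = processService.findByUser(context, user, pageSize, offset);
                for (Process process : page) {
                    System.out.println(process.getID() + " " + process.getName());
                }
            }
        }
    }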
+ * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java index 23ce6ce381b5..d719b5006c14 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java @@ -24,6 +24,7 @@ import org.dspace.content.dao.ProcessDAO; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessQueryParameterContainer; import org.dspace.scripts.Process_; @@ -168,6 +169,33 @@ public List findByStatusAndCreationTimeOlderThan(Context context, List< return list(context, criteriaQuery, false, Process.class, -1, -1); } + @Override + public List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.desc(processRoot.get(Process_.PROCESS_ID))); + criteriaQuery.orderBy(orderList); + + return list(context, criteriaQuery, false, Process.class, limit, offset); + } + + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + return count(context, criteriaQuery, criteriaBuilder, processRoot); + } + } diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java index fefb4eb768ea..2587e6b0251e 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java @@ -8,12 +8,15 @@ package org.dspace.curate; import java.sql.SQLException; +import java.util.List; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link Curation} script @@ -22,9 +25,6 @@ */ public class CurationScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,16 +38,37 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { } /** - 
* Only admin can run Curation script via the scripts and processes endpoints. - * @param context The relevant DSpace context - * @return True if currentUser is admin, otherwise false + * Only repository admins or admins of the target object can run Curation script via the scripts + * and processes endpoints. + * + * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise + * @return true if the currentUser is allowed to run the script with the specified parameters or + * at least in some case if the parameters are not yet known */ @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { try { - return authorizeService.isAdmin(context); + if (commandLineParameters == null) { + return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) + || authorizeService.isItemAdmin(context); + } else if (commandLineParameters.stream() + .map(DSpaceCommandLineParameter::getName) + .noneMatch("-i"::equals)) { + return authorizeService.isAdmin(context); + } else { + String dspaceObjectID = commandLineParameters.stream() + .filter(parameter -> "-i".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .findFirst() + .get(); + HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + DSpaceObject dso = handleService.resolveToObject(context, dspaceObjectID); + return authorizeService.isAdmin(context, dso); + } } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + throw new RuntimeException(e); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java index 8bf3cf2aba62..8707b733a637 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.discovery; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link IndexClient} script */ public class IndexDiscoveryScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -30,15 +22,6 @@ public Class getDspaceRunnableClass() { return dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java index 1a657343c017..88a1033eca5f 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java +++ 
b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.orcid.script; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Script configuration for {@link OrcidBulkPush}. @@ -24,20 +19,8 @@ */ public class OrcidBulkPushScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java index 33fea75add92..2e14aeaa36c0 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java @@ -129,6 +129,11 @@ public List findAllSortByStartTime(Context context) throws SQLException return processes; } + @Override + public List findByUser(Context context, EPerson eperson, int limit, int offset) throws SQLException { + return processDAO.findByUser(context, eperson, limit, offset); + } + @Override public void start(Context context, Process process) throws SQLException { process.setProcessStatus(ProcessStatus.RUNNING); @@ -311,6 +316,11 @@ public List findByStatusAndCreationTimeOlderThan(Context context, List< return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date); } + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + return processDAO.countByUser(context, user); + } + private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); StringBuilder sb = new StringBuilder(); diff --git a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java index c8a7812a5159..abb700cb10c9 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java @@ -37,7 +37,7 @@ public ScriptConfiguration getScriptConfiguration(String name) { @Override public List getScriptConfigurations(Context context) { return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter( - scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context)) + scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context, null)) .sorted(Comparator.comparing(ScriptConfiguration::getName)) .collect(Collectors.toList()); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 4b15c22f444a..e22063eb4954 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ 
-7,17 +7,28 @@ */ package org.dspace.scripts.configuration; +import java.sql.SQLException; +import java.util.List; + import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.DSpaceRunnable; import org.springframework.beans.factory.BeanNameAware; +import org.springframework.beans.factory.annotation.Autowired; /** * This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this - * and represent a script's configuration + * and represent a script's configuration. + * By default, scripts are available only to repository administrators; scripts that have a broader audience + * must override the {@link #isAllowedToExecute(Context, List)} method. */ public abstract class ScriptConfiguration implements BeanNameAware { + @Autowired + protected AuthorizeService authorizeService; + /** * The possible options for this script */ @@ -70,6 +81,7 @@ public void setName(String name) { * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration */ public abstract void setDspaceRunnableClass(Class dspaceRunnableClass); + /** * This method will return if the script is allowed to execute in the given context. This is by default set * to the currentUser in the context being an admin, however this can be overwritten by each script individually @@ -77,7 +89,13 @@ public void setName(String name) { * @param context The relevant DSpace context * @return A boolean indicating whether the script is allowed to execute or not */ - public abstract boolean isAllowedToExecute(Context context); + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } /** * The getter for the options of the Script diff --git a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java index ce6a173b0eda..c6fc24888155 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java +++ b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java @@ -255,4 +255,26 @@ void createLogBitstream(Context context, Process process) */ List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) throws SQLException; + + /** + * Returns a list of all Process objects in the database by the given user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Process objects in the Database + * @throws SQLException If something goes wrong + */ + List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException; + + /** + * Count all the processes related to the given user. 
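With the admin-only check now living in the base class, a script that should be runnable by a wider audience overrides isAllowedToExecute, as the curate configuration above does for community, collection and item admins (and, when a -i handle is passed, for admins of that specific object). Below is a minimal sketch of such an override; the subclass itself and the choice to also admit community and collection admins are illustrative, not part of this patch:

    import java.sql.SQLException;
    import java.util.List;
    import org.apache.commons.cli.Options;
    import org.dspace.core.Context;
    import org.dspace.scripts.DSpaceCommandLineParameter;
    import org.dspace.scripts.DSpaceRunnable;
    import org.dspace.scripts.configuration.ScriptConfiguration;

    // Illustrative subclass only: shows the shape of a broader isAllowedToExecute override.
    public class ExampleScriptConfiguration<T extends DSpaceRunnable> extends ScriptConfiguration<T> {

        private Class<T> dspaceRunnableClass;

        @Override
        public Class<T> getDspaceRunnableClass() {
            return dspaceRunnableClass;
        }

        @Override
        public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
            this.dspaceRunnableClass = dspaceRunnableClass;
        }

        @Override
        public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
            try {
                // Broaden the default: repository admins plus community/collection admins may start this script.
                return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context);
            } catch (SQLException e) {
                throw new RuntimeException("SQLException occurred when checking the current user's privileges", e);
            }
        }

        @Override
        public Options getOptions() {
            if (options == null) {
                options = new Options();
            }
            return options;
        }
    }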
+ * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java index dcae4aa4cbcd..7d1015c8e2ba 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.statistics.export; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script @@ -21,9 +16,6 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -41,15 +33,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java index 41b15ddd7a5a..894d3491a181 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.submit.migration; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link SubmissionFormsMigration} script @@ -23,9 +18,6 @@ public class SubmissionFormsMigrationCliScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java 
b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java index af3574da699e..6d9f3198fe26 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java @@ -7,7 +7,12 @@ */ package org.dspace.submit.migration; +import java.util.List; + +import org.apache.commons.cli.Options; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; /** * Subclass of {@link SubmissionFormsMigrationCliScriptConfiguration} to be use in rest/scripts.xml configuration so @@ -15,10 +20,37 @@ * * @author Maria Verdonck (Atmire) on 05/01/2021 */ -public class SubmissionFormsMigrationScriptConfiguration extends SubmissionFormsMigrationCliScriptConfiguration { +public class SubmissionFormsMigrationScriptConfiguration + extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return this.dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location"); + options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location"); + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { // Script is not allowed to be executed from REST side return false; } diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java index 52685b563d9b..dd61fab9671c 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java @@ -8,15 +8,11 @@ package org.dspace.subscriptions; -import java.sql.SQLException; import java.util.Objects; import org.apache.commons.cli.Options; -import org.dspace.authorize.AuthorizeServiceImpl; -import org.dspace.core.Context; import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them @@ -26,18 +22,6 @@ public class SubscriptionEmailNotificationConfiguration dspaceRunnableClass; - @Autowired - private AuthorizeServiceImpl authorizeService; - - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (Objects.isNull(options)) { diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java 
b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java index ff1083d318d9..b20515017af0 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java @@ -162,8 +162,8 @@ protected > B setOnlyReadPermission(DSp return (B) this; } /** - * Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other - * READ permissions will be removed + * Support method to grant the {@link Constants#ADMIN} permission over an object only to a specific eperson. + * If another ADMIN policy is in place for an eperson it will be replaced * * @param dso * the DSpaceObject on which grant the permission diff --git a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java index 70dea309f2cf..3e5ab0f38f5b 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java @@ -353,9 +353,9 @@ public ItemBuilder withOtherIdentifier(String identifier) { } /** - * Create an admin group for the collection with the specified members + * Assign the admin permission to the specified eperson * - * @param ePerson epersons to add to the admin group + * @param ePerson the eperson that will get the ADMIN permission on the item * @return this builder * @throws SQLException * @throws AuthorizeException diff --git a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java index 86573940e416..0631e1b55a37 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java @@ -113,6 +113,9 @@ public void delete(Context c, Process dso) throws Exception { } public static void deleteProcess(Integer integer) throws SQLException, IOException { + if (integer == null) { + return; + } try (Context c = new Context()) { c.turnOffAuthorisationSystem(); Process process = processService.find(c, integer); diff --git a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af762..632b4e2f83f4 100644 --- a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", 
e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java index 196cade5dd51..3aeec9535be5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.model.ProcessRest; import org.dspace.app.rest.model.ScriptRest; import org.dspace.app.rest.model.hateoas.ProcessResource; @@ -24,6 +25,7 @@ import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.PathVariable; @@ -59,8 +61,8 @@ public class ScriptProcessesController { * @return The ProcessResource object for the created process * @throws Exception If something goes wrong */ - @RequestMapping(method = RequestMethod.POST) - @PreAuthorize("hasAuthority('ADMIN')") + @RequestMapping(method = RequestMethod.POST, consumes = MediaType.MULTIPART_FORM_DATA_VALUE) + @PreAuthorize("hasAuthority('AUTHENTICATED')") public ResponseEntity> startProcess( @PathVariable(name = "name") String scriptName, @RequestParam(name = "file", required = false) List files) @@ -75,4 +77,13 @@ public ResponseEntity> startProcess( return ControllerUtils.toResponseEntity(HttpStatus.ACCEPTED, new HttpHeaders(), processResource); } + @RequestMapping(method = RequestMethod.POST, consumes = "!" 
+ MediaType.MULTIPART_FORM_DATA_VALUE) + @PreAuthorize("hasAuthority('AUTHENTICATED')") + public ResponseEntity> startProcessInvalidMimeType( + @PathVariable(name = "name") String scriptName, + @RequestParam(name = "file", required = false) List files) + throws Exception { + throw new DSpaceBadRequestException("Invalid mimetype"); + } + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java index 33addf704916..2479eeda97f5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java @@ -94,6 +94,22 @@ public Page findAll(Context context, Pageable pageable) { } } + @SearchRestMethod(name = "own") + @PreAuthorize("hasAuthority('AUTHENTICATED')") + public Page findByCurrentUser(Pageable pageable) { + + try { + Context context = obtainContext(); + long total = processService.countByUser(context, context.getCurrentUser()); + List processes = processService.findByUser(context, context.getCurrentUser(), + pageable.getPageSize(), + Math.toIntExact(pageable.getOffset())); + return converter.toRestPage(processes, pageable, total, utils.obtainProjection()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + /** * Calls on the getBitstreams method to retrieve all the Bitstreams of this process * @param processId The processId of the Process to retrieve the Bitstreams for diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java index d974a6d78a6c..2fc996a327cc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java @@ -37,6 +37,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.AccessDeniedException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -56,29 +57,24 @@ public class ScriptRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) { List scriptConfigurations = scriptService.getScriptConfigurations(context); @@ -104,11 +100,17 @@ public ProcessRest startProcess(Context context, String scriptName, List dSpaceCommandLineParameters = processPropertiesToDSpaceCommandLineParameters(properties); ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); + if (scriptToExecute == null) { - throw new DSpaceBadRequestException("The script for name: " + scriptName + " wasn't found"); + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); } - if (!scriptToExecute.isAllowedToExecute(context)) { - throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName); + try { + if (!scriptToExecute.isAllowedToExecute(context, dSpaceCommandLineParameters)) { + throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName + + " and the specified 
parameters " + StringUtils.join(dSpaceCommandLineParameters, ", ")); + } + } catch (IllegalArgumentException e) { + throw new DSpaceBadRequestException("missed handle"); } RestDSpaceRunnableHandler restDSpaceRunnableHandler = new RestDSpaceRunnableHandler( context.getCurrentUser(), scriptToExecute.getName(), dSpaceCommandLineParameters, diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java index 5ac416e6067d..d76e20b23d19 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest; +import static org.dspace.app.rest.matcher.ProcessMatcher.matchProcess; +import static org.dspace.content.ProcessStatus.SCHEDULED; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; @@ -783,6 +785,30 @@ public void searchProcessTestByUserSortedOnNonExistingIsSortedAsDefault() throws .andExpect(status().isBadRequest()); } + @Test + public void testFindByCurrentUser() throws Exception { + + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + ProcessBuilder.createProcess(context, admin, "mock-script", parameters) + .withStartAndEndTime("11/01/1990", "19/01/1990") + .build(); + Process process3 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("12/01/1990", "18/01/1990") + .build(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(get("/api/system/processes/search/own")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.processes", contains( + matchProcess(process3.getName(), eperson.getID().toString(), process3.getID(), parameters, SCHEDULED), + matchProcess(process1.getName(), eperson.getID().toString(), process1.getID(), parameters, SCHEDULED)))) + .andExpect(jsonPath("$.page", is(PageMatcher.pageEntryWithTotalPagesAndElements(0, 20, 1, 2)))); + + } + @Test public void getProcessOutput() throws Exception { try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 07edfeec33d3..16e691ef6f95 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -12,6 +12,7 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @@ -44,6 +45,7 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.GroupBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ProcessBuilder; @@ -53,6 +55,7 @@ import org.dspace.content.Item; import 
org.dspace.content.ProcessStatus; import org.dspace.content.service.BitstreamService; +import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.Process; @@ -123,12 +126,65 @@ public void findAllScriptsSortedAlphabeticallyTest() throws Exception { @Test - public void findAllScriptsUnauthorizedTest() throws Exception { + public void findAllScriptsGenericLoggedInUserTest() throws Exception { String token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(get("/api/system/scripts")) - .andExpect(status().isForbidden()); + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(0))); + } + @Test + public void findAllScriptsLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + // the local admins have at least access to the curate script + // and not access to process-cleaner script + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + getClient(colAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(itemAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); } @Test @@ -222,6 +278,63 @@ public void findOneScriptByNameTest() throws Exception { )); } + @Test + public void findOneScriptByNameLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + 
.withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(colAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(itemAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + } + + @Test + public void findOneScriptByNameNotAuthenticatedTest() throws Exception { + getClient().perform(get("/api/system/scripts/mock-script")) + .andExpect(status().isUnauthorized()); + } + @Test public void findOneScriptByNameTestAccessDenied() throws Exception { String token = getAuthToken(eperson.getEmail(), password); @@ -235,7 +348,7 @@ public void findOneScriptByInvalidNameBadRequestExceptionTest() throws Exception String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(get("/api/system/scripts/mock-script-invalid")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -277,16 +390,6 @@ public void postProcessAdminWrongOptionsException() throws Exception { @Test public void postProcessAdminNoOptionsFailedStatus() throws Exception { -// List list = new LinkedList<>(); -// -// ParameterValueRest parameterValueRest = new ParameterValueRest(); -// parameterValueRest.setName("-z"); -// parameterValueRest.setValue("test"); -// ParameterValueRest parameterValueRest1 = new ParameterValueRest(); -// parameterValueRest1.setName("-q"); -// list.add(parameterValueRest); -// list.add(parameterValueRest1); - LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-z", "test")); @@ -322,7 +425,7 @@ public void postProcessNonExistingScriptNameException() throws Exception { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script-invalid/processes")) - 
.andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -434,6 +537,8 @@ public void postProcessAndVerifyOutput() throws Exception { } + + @Test public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception { @@ -601,9 +706,9 @@ public void TrackSpecialGroupduringprocessSchedulingTest() throws Exception { } } - @After public void destroy() throws Exception { + context.turnOffAuthorisationSystem(); CollectionUtils.emptyIfNull(processService.findAll(context)).stream().forEach(process -> { try { processService.delete(context, process); @@ -611,6 +716,7 @@ public void destroy() throws Exception { throw new RuntimeException(e); } }); + context.restoreAuthSystemState(); super.destroy(); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java index 27c37f1487e4..ccb7d43a2378 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java @@ -11,7 +11,6 @@ import org.apache.commons.cli.Options; import org.dspace.app.rest.converter.ScriptConverter; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -28,10 +27,6 @@ public void setDspaceRunnableClass(final Class dspaceRunnableClass) { } - public boolean isAllowedToExecute(final Context context) { - return true; - } - public Options getOptions() { Options options = new Options(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java index a528f4351356..3e40a8559482 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java @@ -14,6 +14,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.File; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.concurrent.atomic.AtomicReference; @@ -29,13 +30,19 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ProcessBuilder; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.ProcessStatus; +import org.dspace.content.Site; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.eperson.EPerson; import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.service.ScriptService; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; @@ -49,6 +56,9 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest { @Autowired private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; + @Autowired + private ScriptService scriptService; + private final static String SCRIPTS_ENDPOINT = "/api/" + ScriptRest.CATEGORY + "/" + ScriptRest.PLURAL_NAME; private final static String CURATE_SCRIPT_ENDPOINT = SCRIPTS_ENDPOINT + "/curate/" + 
ProcessRest.PLURAL_NAME; @@ -371,6 +381,263 @@ public void curateScript_EPersonInParametersFails() throws Exception { } } + /** + * This test will create a basic structure of communities, collections and items with some local admins at each + * level and verify that the local admins can only run the curate script on their own objects + */ + @Test + public void securityCurateTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Community anotherCommunity = CommunityBuilder.createCommunity(context) + .withName("Another Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + Collection anotherCollection = CollectionBuilder.createCollection(context, anotherCommunity) + .withName("AnotherCollection") + .build(); + Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + Item anotherItem = ItemBuilder.createItem(context, anotherCollection) + .withTitle("Another Test item to curate").build(); + Site site = ContentServiceFactory.getInstance().getSiteService().findSite(context); + context.restoreAuthSystemState(); + LinkedList siteParameters = new LinkedList<>(); + siteParameters.add(new DSpaceCommandLineParameter("-i", site.getHandle())); + siteParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList comParameters = new LinkedList<>(); + comParameters.add(new DSpaceCommandLineParameter("-i", community.getHandle())); + comParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherComParameters = new LinkedList<>(); + anotherComParameters.add(new DSpaceCommandLineParameter("-i", anotherCommunity.getHandle())); + anotherComParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList colParameters = new LinkedList<>(); + colParameters.add(new DSpaceCommandLineParameter("-i", collection.getHandle())); + colParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherColParameters = new LinkedList<>(); + anotherColParameters.add(new DSpaceCommandLineParameter("-i", anotherCollection.getHandle())); + anotherColParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList itemParameters = new LinkedList<>(); + itemParameters.add(new DSpaceCommandLineParameter("-i", item.getHandle())); + itemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherItemParameters = new LinkedList<>(); + anotherItemParameters.add(new DSpaceCommandLineParameter("-i", anotherItem.getHandle())); + anotherItemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + + List listCurateSite = siteParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + 
.convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCom = comParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCom = anotherComParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCol = colParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCol = anotherColParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listItem = itemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherItem = anotherItemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + String adminToken = getAuthToken(admin.getEmail(), password); + List acceptableProcessStatuses = new LinkedList<>(); + acceptableProcessStatuses.addAll(Arrays.asList(ProcessStatus.SCHEDULED, + ProcessStatus.RUNNING, + ProcessStatus.COMPLETED)); + + AtomicReference idSiteRef = new AtomicReference<>(); + AtomicReference idComRef = new AtomicReference<>(); + AtomicReference idComColRef = new AtomicReference<>(); + AtomicReference idComItemRef = new AtomicReference<>(); + AtomicReference idColRef = new AtomicReference<>(); + AtomicReference idColItemRef = new AtomicReference<>(); + AtomicReference idItemRef = new AtomicReference<>(); + + ScriptConfiguration curateScriptConfiguration = scriptService.getScriptConfiguration("curate"); + // we should be able to start the curate script with all our admins on the respective dso + try { + // start a process as general admin + getClient(adminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(admin.getID()), + siteParameters, + acceptableProcessStatuses)))) + .andDo(result -> idSiteRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // check with the com admin + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + comParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be able to run the curate also over the children collection and item + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + 
.andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be NOT able to run the curate over other com, col or items + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCom))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the col admin + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the col admin should be able to run the curate also over the owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // the col admin should be NOT able to run the curate over the community nor another collection nor + // on a not owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + 
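For context, the multipart "properties" parameter used throughout this test is simply the JSON form of the converted command line parameters. A minimal standalone sketch of building an equivalent payload with Jackson, assuming the REST parameter fields are named "name" and "value" (as in ParameterValueRest) and reusing the -i/-t noop arguments from above; the handle value and class name are illustrative:

import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.List;
import java.util.Map;

public class CuratePropertiesSketch {
    public static void main(String[] args) throws Exception {
        // Mirrors the "-i <handle> -t noop" parameters built above; the handle is hypothetical.
        List<Map<String, String>> properties = List.of(
                Map.of("name", "-i", "value", "123456789/1"),
                Map.of("name", "-t", "value", "noop"));
        // The resulting JSON string is sent as the "properties" part of
        // POST /api/system/scripts/curate/processes in the requests above.
        System.out.println(new ObjectMapper().writeValueAsString(properties));
    }
}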
getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the item admin + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(itemAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the item admin should be NOT able to run the curate over the community nor the collection nor + // on a not owned item + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + } finally { + ProcessBuilder.deleteProcess(idSiteRef.get()); + ProcessBuilder.deleteProcess(idComRef.get()); + ProcessBuilder.deleteProcess(idComColRef.get()); + ProcessBuilder.deleteProcess(idComItemRef.get()); + ProcessBuilder.deleteProcess(idColRef.get()); + ProcessBuilder.deleteProcess(idColItemRef.get()); + ProcessBuilder.deleteProcess(idItemRef.get()); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af762..632b4e2f83f4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public void 
setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { From 7ba68c40ab47863d3530e28ac9d3572c55dc5537 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Thu, 4 May 2023 15:58:04 +0200 Subject: [PATCH 166/686] [CST-5728] LYRASIS [Donated]: DSpace7: Implement signposting pattern in DSpace (REST) --- .../dspace/content/MetadataSchemaEnum.java | 3 +- .../RelationshipMetadataServiceImpl.java | 3 +- .../org/dspace/util/FrontendUrlService.java | 74 ++++ .../java/org/dspace/app/rest/Application.java | 19 +- .../security/WebSecurityConfiguration.java | 2 +- .../controller/LinksetRestController.java | 227 ++++++++++++ .../converter/LinksetConverter.java | 37 ++ .../LinksetHttpMessageConverter.java | 81 +++++ .../signposting/converter/LsetConverter.java | 37 ++ .../signposting/hateoas/LinksetResource.java | 26 ++ .../app/rest/signposting/model/Linkset.java | 127 +++++++ .../rest/signposting/model/LinksetRest.java | 73 ++++ .../app/rest/signposting/model/Lset.java | 68 ++++ .../app/rest/signposting/model/Relation.java | 34 ++ .../rest/signposting/model/TypedLinkRest.java | 97 +++++ .../BitstreamSignPostingProcessor.java | 28 ++ .../processor/ItemSignPostingProcessor.java | 30 ++ .../processor/SignPostingProcessor.java | 30 ++ .../relation/ASignPostingProcessor.java | 48 +++ .../relation/BitstreamLicenseProcessor.java | 74 ++++ .../relation/BitstreamLinksetProcessor.java | 85 +++++ .../BitstreamPublicationBundaryProcessor.java | 76 ++++ .../relation/BitstreamTypeProcessor.java | 74 ++++ .../relation/ItemAuthorProcessor.java | 109 ++++++ .../relation/ItemIdentifierProcessor.java | 89 +++++ .../relation/ItemLandingPageProcessor.java | 59 +++ .../relation/ItemLicenseProcessor.java | 79 ++++ .../ItemPublicationBundaryProcessor.java | 87 +++++ .../relation/ItemTypeProcessor.java | 79 ++++ .../app/rest/utils/ApplicationConfig.java | 27 ++ .../controller/LinksetRestControllerIT.java | 336 ++++++++++++++++++ dspace/config/dspace.cfg | 1 + dspace/config/modules/signposting.cfg | 29 ++ dspace/config/spring/api/core-services.xml | 2 + dspace/config/spring/rest/signposting.xml | 46 +++ 35 files changed, 2291 insertions(+), 5 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LsetConverter.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/hateoas/LinksetResource.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Lset.java create mode 100644 
dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Relation.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/BitstreamSignPostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/ItemSignPostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ASignPostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLicenseProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLinksetProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamPublicationBundaryProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamTypeProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemAuthorProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemIdentifierProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLandingPageProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLicenseProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemPublicationBundaryProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemTypeProcessor.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java create mode 100644 dspace/config/modules/signposting.cfg create mode 100644 dspace/config/spring/rest/signposting.xml diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java index deca62566aae..559e3bf5cf5a 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java @@ -16,7 +16,8 @@ public enum MetadataSchemaEnum { DC("dc"), EPERSON("eperson"), - RELATION("relation"); + RELATION("relation"), + PERSON("person"); /** * The String representation of the MetadataSchemaEnum diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java index 14ed441b819e..c6cf21a55fc7 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java @@ -110,7 +110,8 @@ protected List findLatestForDiscoveryMetadataValues( // on the left item as a storage/performance improvement. // As a consequence, when searching for related items (using discovery) // on the pages of the right items you won't be able to find the left item. 
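The change just below swaps a direct equals call for Objects.equals, which is null-safe when a relationship type has no left entity type set. A minimal illustration with placeholder values:

import java.util.Objects;

public class NullSafeEqualsSketch {
    public static void main(String[] args) {
        String leftType = null;                 // e.g. a relationship type whose left type is not set
        String itemEntityType = "Publication";  // illustrative entity type label
        // leftType.equals(itemEntityType) would throw a NullPointerException here,
        // whereas Objects.equals(leftType, itemEntityType) simply evaluates to false.
        System.out.println(Objects.equals(leftType, itemEntityType));
    }
}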
- if (relationshipType.getTilted() != RIGHT && relationshipType.getLeftType().equals(itemEntityType)) { + if (relationshipType.getTilted() != RIGHT + && Objects.equals(relationshipType.getLeftType(), itemEntityType)) { String element = relationshipType.getLeftwardType(); List data = relationshipService .findByLatestItemAndRelationshipType(context, item, relationshipType, true); diff --git a/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java new file mode 100644 index 000000000000..5e15c9c50a9e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.lowerCase; + +import java.util.List; +import java.util.Optional; + +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * Service class for generation of front-end urls. + */ +@Component +public class FrontendUrlService { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(FrontendUrlService.class); + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private SearchService searchService; + + /** + * Generates front-end url for specified item. 
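The method below prefers an entity-aware route and falls back to the plain item route when no entity type can be resolved through discovery. A rough sketch of the two resulting URL shapes, assuming a dspace.ui.url of https://demo.dspace.org and an illustrative UUID and entity type:

import java.util.Optional;
import java.util.UUID;

public class FrontendUrlSketch {
    public static void main(String[] args) {
        String uiUrl = "https://demo.dspace.org";                 // assumed dspace.ui.url
        UUID itemId = UUID.fromString("1911e8a4-6939-490c-b58b-a5d70f8d91fb"); // illustrative item UUID
        Optional<String> entityType = Optional.of("publication"); // resolved via discovery when available
        String url = entityType
                .map(type -> uiUrl + "/entities/" + type + "/" + itemId)
                .orElse(uiUrl + "/items/" + itemId);
        System.out.println(url); // https://demo.dspace.org/entities/publication/1911e8a4-...
    }
}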
+ * + * @param item item + * @return front-end url + */ + public String generateUrl(Item item) { + String uiURL = configurationService.getProperty("dspace.ui.url"); + Context context = new Context(Context.Mode.READ_ONLY); + return generateUrlWithSearchService(item, uiURL, context) + .orElseGet(() -> uiURL + "/items/" + item.getID()); + } + + private Optional generateUrlWithSearchService(Item item, String uiURLStem, Context context) { + DiscoverQuery entityQuery = new DiscoverQuery(); + entityQuery.setQuery("search.uniqueid:\"Item-" + item.getID() + "\" and entityType:*"); + entityQuery.addSearchField("entityType"); + + try { + DiscoverResult discoverResult = searchService.search(context, entityQuery); + if (isNotEmpty(discoverResult.getIndexableObjects())) { + List entityTypes = discoverResult.getSearchDocument(discoverResult.getIndexableObjects() + .get(0)).get(0).getSearchFieldValues("entityType"); + if (isNotEmpty(entityTypes) && isNotBlank(entityTypes.get(0))) { + return Optional.of(uiURLStem + "/entities/" + lowerCase(entityTypes.get(0)) + "/" + item.getID()); + } + } + } catch (SearchServiceException e) { + log.error("Failed getting entitytype through solr for item " + item.getID() + ": " + e.getMessage()); + } + return Optional.empty(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java index f8922f528c0d..07b802b684ee 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java @@ -178,12 +178,15 @@ public void addCorsMappings(@NonNull CorsRegistry registry) { // Get allowed origins for api and iiif endpoints. // The actuator endpoints are configured using management.endpoints.web.cors.* properties String[] corsAllowedOrigins = configuration - .getCorsAllowedOrigins(configuration.getCorsAllowedOriginsConfig()); + .getCorsAllowedOrigins(configuration.getCorsAllowedOriginsConfig()); String[] iiifAllowedOrigins = configuration - .getCorsAllowedOrigins(configuration.getIiifAllowedOriginsConfig()); + .getCorsAllowedOrigins(configuration.getIiifAllowedOriginsConfig()); + String[] signpostingAllowedOrigins = configuration + .getCorsAllowedOrigins(configuration.getSignpostingAllowedOriginsConfig()); boolean corsAllowCredentials = configuration.getCorsAllowCredentials(); boolean iiifAllowCredentials = configuration.getIiifAllowCredentials(); + boolean signpostingAllowCredentials = configuration.getSignpostingAllowCredentials(); if (corsAllowedOrigins != null) { registry.addMapping("/api/**").allowedMethods(CorsConfiguration.ALL) // Set Access-Control-Allow-Credentials to "true" and specify which origins are valid @@ -209,6 +212,18 @@ public void addCorsMappings(@NonNull CorsRegistry registry) { // Allow list of response headers allowed to be sent by us (the server) to the client .exposedHeaders("Authorization", "DSPACE-XSRF-TOKEN", "Location", "WWW-Authenticate"); } + if (signpostingAllowedOrigins != null) { + registry.addMapping("/signposting/**").allowedMethods(CorsConfiguration.ALL) + // Set Access-Control-Allow-Credentials to "true" and specify which origins are valid + // for our Access-Control-Allow-Origin header + .allowCredentials(signpostingAllowCredentials).allowedOrigins(signpostingAllowedOrigins) + // Allow list of request preflight headers allowed to be sent to us from the client + .allowedHeaders("Accept", "Authorization", "Content-Type", "Origin", 
"X-On-Behalf-Of", + "X-Requested-With", "X-XSRF-TOKEN", "X-CORRELATION-ID", "X-REFERRER", + "x-recaptcha-token", "access-control-allow-headers") + // Allow list of response headers allowed to be sent by us (the server) to the client + .exposedHeaders("Authorization", "DSPACE-XSRF-TOKEN", "Location", "WWW-Authenticate"); + } } /** diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java index fad60c20d250..7bd698a63a60 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java @@ -87,7 +87,7 @@ protected void configure(HttpSecurity http) throws Exception { // Configure authentication requirements for ${dspace.server.url}/api/ URL only // NOTE: REST API is hardcoded to respond on /api/. Other modules (OAI, SWORD, IIIF, etc) use other root paths. http.requestMatchers() - .antMatchers("/api/**", "/iiif/**", actuatorBasePath + "/**") + .antMatchers("/api/**", "/iiif/**", actuatorBasePath + "/**", "/signposting/**") .and() // Enable Spring Security authorization on these paths .authorizeRequests() diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java new file mode 100644 index 000000000000..9197e2cdf407 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -0,0 +1,227 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.controller; + +import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.LinksetRest; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.model.TypedLinkRest; +import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.app.rest.utils.Utils; +import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; +import org.dspace.utils.DSpace; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import 
org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * This RestController takes care of the retrieval of {@link LinksetRest}. + * This class will receive the UUID of an {@link Item} or {@link Bitstream}. + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +@RestController +@RequestMapping("/${signposting.path:signposting}") +@ConditionalOnProperty("signposting.enabled") +public class LinksetRestController { + + @Autowired + private Utils utils; + @Autowired + private BitstreamService bitstreamService; + @Autowired + private ItemService itemService; + @Autowired + private HandleService handleService; + @Autowired + private ConfigurationService configurationService; + @Autowired + private ConverterService converter; + + @PreAuthorize("permitAll()") + @RequestMapping(method = RequestMethod.GET) + public ResponseEntity getAll() { + return ResponseEntity.status(HttpStatus.METHOD_NOT_ALLOWED).build(); + } + + @PreAuthorize("permitAll()") + @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) + public ResponseEntity getOne() { + return ResponseEntity.status(HttpStatus.METHOD_NOT_ALLOWED).build(); + } + + @PreAuthorize("permitAll()") + @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/json", + method = RequestMethod.GET, produces = "application/linkset+json") + public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) { + try { + Context context = ContextUtil.obtainContext(request); + + DSpaceObject dso = null; + dso = itemService.find(context, uuid); + if (dso == null) { + throw new ResourceNotFoundException("No such Item: " + uuid); + } + + List linksets = new ArrayList<>(); + Linkset primaryLinkset = new Linkset(); + linksets.add(primaryLinkset); + + if (dso.getType() == Constants.ITEM) { + primaryLinkset.setAnchor(handleService.resolveToURL( + context, dso.getHandle())); + List ispp = new DSpace().getServiceManager() + .getServicesByType(ItemSignPostingProcessor.class); + for (ItemSignPostingProcessor sp : ispp) { + sp.buildRelation(context, request, (Item) dso, linksets, primaryLinkset); + } + } + + LinksetRest linksetRest = null; + for (Linkset linkset : linksets) { + if (linksetRest == null) { + linksetRest = converter.toRest(linkset, utils.obtainProjection()); + } else { + linksetRest.getLinkset().add(linkset); + } + } + return linksetRest; + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @PreAuthorize("permitAll()") + @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, + method = RequestMethod.GET, produces = "application/linkset") + public LinksetRest getLset(HttpServletRequest request, @PathVariable UUID uuid) { + try { + Context context = ContextUtil.obtainContext(request); + + DSpaceObject dso = null; + dso = itemService.find(context, uuid); + if (dso == null) { + throw new ResourceNotFoundException("No such Item: " + uuid); + } + + List lsets = new ArrayList<>(); + if (dso.getType() == Constants.ITEM) { + List ispp = new DSpace().getServiceManager() + .getServicesByType(ItemSignPostingProcessor.class); + for 
(ItemSignPostingProcessor sp : ispp) { + sp.buildLset(context, request, (Item) dso, lsets); + } + } + + LinksetRest linksetRest = null; + for (Lset lset : lsets) { + if (linksetRest == null) { + linksetRest = converter.toRest(lset, utils.obtainProjection()); + } else { + linksetRest.getLset().add(lset); + } + } + return linksetRest; + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @PreAuthorize("permitAll()") + @RequestMapping(value = "/links" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) + public List getHeader(HttpServletRequest request, @PathVariable UUID uuid) { + try { + Context context = ContextUtil.obtainContext(request); + + DSpaceObject dso = null; + dso = bitstreamService.find(context, uuid); + if (dso == null) { + dso = itemService.find(context, uuid); + if (dso == null) { + throw new ResourceNotFoundException("No such resource: " + uuid); + } + } + + List linksets = new ArrayList<>(); + Linkset primaryLinkset = new Linkset(); + linksets.add(primaryLinkset); + + if (dso.getType() == Constants.ITEM) { + primaryLinkset.setAnchor(handleService.resolveToURL( + context, dso.getHandle())); + List ispp = new DSpace().getServiceManager() + .getServicesByType(ItemSignPostingProcessor.class); + for (ItemSignPostingProcessor sp : ispp) { + sp.buildRelation(context, request, (Item) dso, linksets, primaryLinkset); + } + } else { + List bspp = new DSpace().getServiceManager() + .getServicesByType(BitstreamSignPostingProcessor.class); + for (BitstreamSignPostingProcessor sp : bspp) { + sp.buildRelation(context, request, (Bitstream) dso, linksets, primaryLinkset); + } + String url = configurationService.getProperty("dspace.ui.url"); + primaryLinkset.setAnchor(url + "/bitstreams/" + dso.getID() + "/download"); + } + + return linksets.stream() + .flatMap(linkset -> mapTypedLinks(linkset).stream()) + .collect(Collectors.toList()); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private static List mapTypedLinks(Linkset linkset) { + return Stream.of( + mapTypedLinks(TypedLinkRest.Relation.LANDING_PAGE, linkset.getLandingPage()), + mapTypedLinks(TypedLinkRest.Relation.ITEM, linkset.getItem()), + mapTypedLinks(TypedLinkRest.Relation.CITE_AS, linkset.getCiteAs()), + mapTypedLinks(TypedLinkRest.Relation.AUTHOR, linkset.getAuthor()), + mapTypedLinks(TypedLinkRest.Relation.TYPE, linkset.getType()), + mapTypedLinks(TypedLinkRest.Relation.LICENSE, linkset.getLicense()), + mapTypedLinks(TypedLinkRest.Relation.COLLECTION, linkset.getCollection()), + mapTypedLinks(TypedLinkRest.Relation.LINKSET, linkset.getLinkset()) + ).flatMap(List::stream).collect(Collectors.toList()); + } + + private static List mapTypedLinks(TypedLinkRest.Relation relationType, List relations) { + return relations.stream() + .map(relation -> new TypedLinkRest(relation.getHref(), relationType, relation.getType())) + .collect(Collectors.toList()); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java new file mode 100644 index 000000000000..e8f151f14a00 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ 
+ */ +package org.dspace.app.rest.signposting.converter; + +import org.dspace.app.rest.converter.DSpaceConverter; +import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.LinksetRest; +import org.springframework.stereotype.Component; + + +/** + * This is the converter from/to the Linkset in the DSpace API data model and the REST data model. + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +@Component +public class LinksetConverter implements DSpaceConverter { + + @Override + public LinksetRest convert(Linkset linkset, Projection projection) { + LinksetRest linksetRest = new LinksetRest(); + linksetRest.setProjection(projection); + linksetRest.getLinkset().add(linkset); + return linksetRest; + } + + @Override + public Class getModelClass() { + return Linkset.class; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java new file mode 100644 index 000000000000..0cc6628c3bc8 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java @@ -0,0 +1,81 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.converter; + +import static java.lang.String.format; +import static org.apache.commons.lang.StringUtils.isNotBlank; + +import java.io.IOException; +import java.lang.reflect.Type; + +import org.apache.commons.lang.NotImplementedException; +import org.dspace.app.rest.signposting.model.LinksetRest; +import org.springframework.http.HttpInputMessage; +import org.springframework.http.HttpOutputMessage; +import org.springframework.http.MediaType; +import org.springframework.http.converter.AbstractGenericHttpMessageConverter; +import org.springframework.http.converter.HttpMessageNotReadableException; +import org.springframework.http.converter.HttpMessageNotWritableException; + +/** + * Converter for converting LinksetRest message into application/linkset format. 
+ */ +public class LinksetHttpMessageConverter extends AbstractGenericHttpMessageConverter { + + public LinksetHttpMessageConverter() { + super(MediaType.valueOf("application/linkset")); + } + + @Override + protected void writeInternal(LinksetRest linksetRest, Type type, HttpOutputMessage outputMessage) + throws IOException, HttpMessageNotWritableException { + StringBuilder responseBody = new StringBuilder(); + linksetRest.getLset().forEach(lset -> { + if (isNotBlank(lset.getLink())) { + responseBody.append(format("<%s> ", lset.getLink())); + } + if (isNotBlank(lset.getRelation())) { + responseBody.append(format("; rel=\"%s\" ", lset.getRelation())); + } + if (isNotBlank(lset.getType())) { + responseBody.append(format("; type=\"%s\" ", lset.getType())); + } + if (isNotBlank(lset.getAnchor())) { + responseBody.append(format("; anchor=\"%s\" ", lset.getAnchor())); + } + responseBody.append(", "); + }); + outputMessage.getBody().write(responseBody.toString().trim().getBytes()); + outputMessage.getBody().flush(); + } + + @Override + protected LinksetRest readInternal(Class clazz, HttpInputMessage inputMessage) + throws HttpMessageNotReadableException { + throw new NotImplementedException(); + } + + @Override + protected boolean canRead(MediaType mediaType) { + return false; + } + + @Override + public boolean canWrite(Class clazz, MediaType mediaType) { + boolean isAppropriateClass = LinksetRest.class.isAssignableFrom(clazz); + boolean isAppropriateMediaType = getSupportedMediaTypes().stream() + .anyMatch(supportedType -> supportedType.isCompatibleWith(mediaType)); + return isAppropriateClass && isAppropriateMediaType; + } + + @Override + public LinksetRest read(Type type, Class contextClass, HttpInputMessage inputMessage) + throws IOException, HttpMessageNotReadableException { + throw new NotImplementedException(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LsetConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LsetConverter.java new file mode 100644 index 000000000000..1e2e3884a434 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LsetConverter.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.converter; + +import org.dspace.app.rest.converter.DSpaceConverter; +import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.signposting.model.LinksetRest; +import org.dspace.app.rest.signposting.model.Lset; +import org.springframework.stereotype.Component; + + +/** + * This is the converter from/to the Lset in the DSpace API data model and the REST data model. 
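For orientation, the LinksetHttpMessageConverter.writeInternal logic above renders each Lset as a Link-header style entry in the application/linkset body. A rough standalone sketch of one such entry, with purely illustrative URLs and values:

public class LinksetEntrySketch {
    public static void main(String[] args) {
        // Values are illustrative; the formatting mirrors the format(...) calls in writeInternal above.
        String link = "https://demo.dspace.org/bitstreams/2ba8d86c-ffff-4a33-9a2a-f1b1d8a4f1f2/download";
        String relation = "item";
        String type = "application/pdf";
        String anchor = "https://demo.dspace.org/handle/123456789/1";
        System.out.println(String.format("<%s> ; rel=\"%s\" ; type=\"%s\" ; anchor=\"%s\" , ",
                link, relation, type, anchor));
    }
}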
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +@Component +public class LsetConverter implements DSpaceConverter { + + @Override + public LinksetRest convert(Lset lset, Projection projection) { + LinksetRest linksetRest = new LinksetRest(); + linksetRest.setProjection(projection); + linksetRest.getLset().add(lset); + return linksetRest; + } + + @Override + public Class getModelClass() { + return Lset.class; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/hateoas/LinksetResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/hateoas/LinksetResource.java new file mode 100644 index 000000000000..8a0c2158d1ea --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/hateoas/LinksetResource.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.hateoas; + +import org.dspace.app.rest.model.hateoas.DSpaceResource; +import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; +import org.dspace.app.rest.signposting.model.LinksetRest; +import org.dspace.app.rest.utils.Utils; + +/** + * Linkset Rest HAL Resource. The HAL Resource wraps the REST Resource + * adding support for the links and embedded resources + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +@RelNameDSpaceResource(LinksetRest.NAME) +public class LinksetResource extends DSpaceResource { + public LinksetResource(LinksetRest linkset, Utils utils) { + super(linkset, utils); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java new file mode 100644 index 000000000000..f97d7a5df39c --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java @@ -0,0 +1,127 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DTO object represents a set of links. 
+ */ +public class Linkset { + + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List author; + @JsonProperty("cite-as") + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List citeAs; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List item; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List collection; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List landingPage; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List type; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List license; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List linkset; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String anchor; + + public List getAuthor() { + if (this.author == null) { + this.author = new ArrayList<>(); + } + return author; + } + public void setAuthor(List author) { + this.author = author; + } + + public List getCiteAs() { + if (this.citeAs == null) { + this.citeAs = new ArrayList<>(); + } + return citeAs; + } + public void setCiteAs(List citeAs) { + this.citeAs = citeAs; + } + + public List getItem() { + if (this.item == null) { + this.item = new ArrayList<>(); + } + return item; + } + public void setItem(List item) { + this.item = item; + } + + public List getCollection() { + if (this.collection == null) { + this.collection = new ArrayList<>(); + } + return collection; + } + public void setCollection(List collection) { + this.collection = collection; + } + + public List getLandingPage() { + if (landingPage == null) { + landingPage = new ArrayList<>(); + } + return landingPage; + } + public void setLandingPage(List landingPage) { + this.landingPage = landingPage; + } + + public List getType() { + if (type == null) { + type = new ArrayList<>(); + } + return type; + } + public void setType(List type) { + this.type = type; + } + + public List getLicense() { + if (license == null) { + license = new ArrayList<>(); + } + return license; + } + public void setLicense(List license) { + this.license = license; + } + + public List getLinkset() { + if (linkset == null) { + linkset = new ArrayList<>(); + } + return linkset; + } + public void setLinkset(List linkset) { + this.linkset = linkset; + } + + public String getAnchor() { + return anchor; + } + public void setAnchor(String anchor) { + this.anchor = anchor; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java new file mode 100644 index 000000000000..6558d75d5851 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import org.dspace.app.rest.RestResourceController; +import org.dspace.app.rest.model.LinksRest; +import org.dspace.app.rest.model.RestAddressableModel; + +/** + * The REST object for the Linkset objects. 
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +@LinksRest +public class LinksetRest extends RestAddressableModel { + public static final String NAME = "linkset"; + public static final String PLURAL_NAME = "linksets"; + public static final String CATEGORY = RestAddressableModel.CORE; + + public static final String JSON = "json"; + + @JsonInclude(Include.NON_EMPTY) + private List linkset; + @JsonInclude(Include.NON_EMPTY) + private List lset; + + public List getLinkset() { + if (this.linkset == null) { + this.linkset = new ArrayList<>(); + } + return linkset; + } + public void setLinkset(List linkset) { + this.linkset = linkset; + } + + public List getLset() { + if (this.lset == null) { + this.lset = new ArrayList<>(); + } + return lset; + } + public void setLset(List lset) { + this.lset = lset; + } + + @JsonIgnore + @Override + public String getType() { + return NAME; + } + + @Override + public String getCategory() { + return CATEGORY; + } + + @Override + public Class getController() { + return RestResourceController.class; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Lset.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Lset.java new file mode 100644 index 000000000000..dbfabfcb00ae --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Lset.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import com.fasterxml.jackson.annotation.JsonInclude; + +/** + * DTO object represents a node of a link set. 
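+ * Each Lset corresponds to one entry of the plain-text {@code application/linkset} serialization,
+ * e.g. {@code <https://doi.org/10.1234/abcd> ; rel="cite-as" ; anchor="https://dspace.example/handle/123456789/1"}
+ * (URLs shown are illustrative).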
+ */ +public class Lset { + + @JsonInclude(JsonInclude.Include.NON_NULL) + private String link; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String relation; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String type; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String anchor; + + public Lset(String link, String relation, String type, String anchor) { + this(link, relation, anchor); + this.type = type; + } + + public Lset(String link, String relation, String anchor) { + this.link = link; + this.relation = relation; + this.anchor = anchor; + } + + public String getLink() { + return link; + } + + public void setLink(String link) { + this.link = link; + } + + public String getRelation() { + return relation; + } + + public void setRelation(String relation) { + this.relation = relation; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getAnchor() { + return anchor; + } + + public void setAnchor(String anchor) { + this.anchor = anchor; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Relation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Relation.java new file mode 100644 index 000000000000..9b3139fe1ee6 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Relation.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import com.fasterxml.jackson.annotation.JsonInclude; + +/** + * DTO object represents a relation to specific resource. + */ +public class Relation { + + @JsonInclude(JsonInclude.Include.NON_NULL) + private String href; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String type; + + public Relation(String href, String type) { + this.href = href; + this.type = type; + } + + public String getHref() { + return href; + } + + public String getType() { + return type; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java new file mode 100644 index 000000000000..2a6bcc4a012f --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java @@ -0,0 +1,97 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonValue; +import org.dspace.app.rest.RestResourceController; +import org.dspace.app.rest.model.LinksRest; +import org.dspace.app.rest.model.RestAddressableModel; + +/** + * The REST object represents Typed Link. 
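+ * Instances are serialized as plain JSON objects, e.g.
+ * {@code {"href": "https://dspace.example/entities/publication/<uuid>", "rel": "landing page", "type": "text/html"}}
+ * (the URL is illustrative).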
+ */ +@LinksRest +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class TypedLinkRest extends RestAddressableModel { + public static final String NAME = "linkset"; + public static final String PLURAL_NAME = "linksets"; + public static final String CATEGORY = RestAddressableModel.CORE; + + private String href; + + private Relation rel; + + private String type; + + public TypedLinkRest() { + } + + public TypedLinkRest(String href, Relation rel, String type) { + this.href = href; + this.rel = rel; + this.type = type; + } + + public String getHref() { + return href; + } + + public void setHref(String href) { + this.href = href; + } + + public Relation getRel() { + return rel; + } + + public void setRel(Relation rel) { + this.rel = rel; + } + + public void setType(String type) { + this.type = type; + } + + @Override + public String getType() { + return type; + } + + @Override + public String getCategory() { + return CATEGORY; + } + + @Override + public Class getController() { + return RestResourceController.class; + } + + public enum Relation { + LANDING_PAGE("landing page"), + ITEM("item"), + CITE_AS("cite-as"), + AUTHOR("author"), + TYPE("type"), + LICENSE("license"), + COLLECTION("collection"), + LINKSET("linkset"); + + private final String name; + + Relation(String name) { + this.name = name; + } + + @JsonValue + public String getName() { + return name; + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/BitstreamSignPostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/BitstreamSignPostingProcessor.java new file mode 100644 index 000000000000..e80c3b4c39c2 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/BitstreamSignPostingProcessor.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor; + +import org.dspace.content.Bitstream; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * BitstreamSignPostingProcessor interface represents SignPostingProcessor for a bitstream. 
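+ * The default {@link #buildAnchor(Bitstream)} implementation anchors relations to the bitstream
+ * download URL, i.e. {@code ${dspace.ui.url}/bitstreams/<uuid>/download}.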
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public interface BitstreamSignPostingProcessor extends SignPostingProcessor { + + default String buildAnchor(Bitstream bitstream) { + ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + String url = configurationService.getProperty("dspace.ui.url"); + return url + "/bitstreams/" + bitstream.getID() + "/download"; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/ItemSignPostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/ItemSignPostingProcessor.java new file mode 100644 index 000000000000..2d4b21464cb3 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/ItemSignPostingProcessor.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor; + +import java.sql.SQLException; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; + +/** + * ItemSignPostingProcessor interface represents SignPostingProcessor for an item. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public interface ItemSignPostingProcessor extends SignPostingProcessor { + + default String buildAnchor(Context context, Item item) throws SQLException { + HandleService handleService = + HandleServiceFactory.getInstance().getHandleService(); + return handleService.resolveToURL(context, item.getHandle()); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java new file mode 100644 index 000000000000..aab13b6626b1 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; + +/** + * SignPostingProcessor interface. 
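+ * Implementations contribute relations for a single DSpace object: {@code buildRelation} adds
+ * {@link Linkset} entries (used for the JSON representation) while {@code buildLset} adds flat
+ * {@link Lset} entries (used for the plain-text representation).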
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public interface SignPostingProcessor { + void buildRelation(Context context, HttpServletRequest request, + T object, List linksets, Linkset primaryLinkset); + + void buildLset(Context context, HttpServletRequest request, + T object, List lsets); +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ASignPostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ASignPostingProcessor.java new file mode 100644 index 000000000000..22ae0a72b704 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ASignPostingProcessor.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +/** + * An abstract class of generic signposting relation. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public abstract class ASignPostingProcessor { + + private String metadataField; + + private String relation; + + private String pattern; + + public String getMetadataField() { + return metadataField; + } + + public void setMetadataField(String metadataField) { + this.metadataField = metadataField; + } + + public String getRelation() { + return relation; + } + + public void setRelation(String relation) { + this.relation = relation; + } + + public String getPattern() { + return pattern; + } + + public void setPattern(String pattern) { + this.pattern = pattern; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLicenseProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLicenseProcessor.java new file mode 100644 index 000000000000..99439770417c --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLicenseProcessor.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import java.text.MessageFormat; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Bitstream; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of {@link ItemSignPostingProcessor} for the license relation. 
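+ * Assuming the processor is configured (via the Spring wiring) with a metadata field such as
+ * {@code dc.rights.uri}, that field's value on the bitstream is emitted as a {@code rel="license"} link.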
+ */ +public class BitstreamLicenseProcessor extends ASignPostingProcessor + implements BitstreamSignPostingProcessor { + + private static Logger log = Logger.getLogger(BitstreamLicenseProcessor.class); + + @Autowired + private BitstreamService bitstreamService; + + public BitstreamLicenseProcessor() { + setRelation("license"); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Bitstream bitstream, List linksets, + Linkset primaryLinkset) { + try { + String license = bitstreamService.getMetadata(bitstream, getMetadataField()); + if (StringUtils.isNotBlank(license)) { + if (StringUtils.isNotBlank(getPattern())) { + license = MessageFormat.format(getPattern(), license); + } + primaryLinkset.getLicense().add(new Relation(license, null)); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, + Bitstream bitstream, List lsets) { + try { + String license = bitstreamService.getMetadata(bitstream, getMetadataField()); + if (StringUtils.isNotBlank(license)) { + if (StringUtils.isNotBlank(getPattern())) { + license = MessageFormat.format(getPattern(), license); + } + lsets.add(new Lset(license, getRelation(), buildAnchor(bitstream))); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLinksetProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLinksetProcessor.java new file mode 100644 index 000000000000..3c13b767ee0c --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLinksetProcessor.java @@ -0,0 +1,85 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of {@link ItemSignPostingProcessor} for the linkset relation. 
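+ * It points from a bitstream back to the linkset documents of its owning item, i.e.
+ * {@code ${dspace.server.url}/signposting/linksets/<item-uuid>} ({@code application/linkset}) and
+ * {@code ${dspace.server.url}/signposting/linksets/<item-uuid>/json} ({@code application/linkset+json}).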
+ */ +public class BitstreamLinksetProcessor extends ASignPostingProcessor + implements BitstreamSignPostingProcessor { + + private static Logger log = Logger.getLogger(BitstreamLinksetProcessor.class); + + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private ConfigurationService configurationService; + + public BitstreamLinksetProcessor() { + setRelation("linkset"); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Bitstream bitstream, List linksets, + Linkset primaryLinkset) { + try { + Item item = (Item) bitstreamService.getParentObject(context, bitstream); + if (item != null) { + String baseUrl = configurationService.getProperty("dspace.server.url"); + String linksetUrl = baseUrl + "/signposting/linksets/" + item.getID(); + String linksetJsonUrl = baseUrl + "/signposting/linksets/" + item.getID() + "/json"; + List relations = List.of( + new Relation(linksetUrl, "application/linkset"), + new Relation(linksetJsonUrl, "application/linkset+json") + ); + primaryLinkset.getLinkset().addAll(relations); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, + Bitstream bitstream, List lsets) { + try { + Item item = (Item) bitstreamService.getParentObject(context, bitstream); + if (item != null) { + String baseUrl = configurationService.getProperty("dspace.server.url"); + String linksetUrl = baseUrl + "/signposting/linksets/" + item.getID(); + String linksetJsonUrl = baseUrl + "/signposting/linksets/" + item.getID() + "/json"; + List links = List.of( + new Lset(linksetUrl, getRelation(), "application/linkset", buildAnchor(bitstream)), + new Lset(linksetJsonUrl, getRelation(), "application/linkset+json", buildAnchor(bitstream)) + ); + lsets.addAll(links); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamPublicationBundaryProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamPublicationBundaryProcessor.java new file mode 100644 index 000000000000..9d86856d6f90 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamPublicationBundaryProcessor.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of {@link ItemSignPostingProcessor} for the collection relation. 
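+ * It links a bitstream back to the landing page of its owning item, emitting a {@code collection}
+ * relation of type {@code text/html}.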
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class BitstreamPublicationBundaryProcessor extends ASignPostingProcessor + implements BitstreamSignPostingProcessor { + + private static Logger log = Logger.getLogger(BitstreamPublicationBundaryProcessor.class); + + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private FrontendUrlService frontendUrlService; + + public BitstreamPublicationBundaryProcessor() { + setRelation("collection"); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Bitstream bitstream, List linksets, + Linkset primaryLinkset) { + try { + Item item = (Item) bitstreamService.getParentObject(context, bitstream); + if (item != null) { + String itemUiUrl = frontendUrlService.generateUrl(item); + primaryLinkset.getCollection().add(new Relation(itemUiUrl, "text/html")); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, + Bitstream bitstream, List lsets) { + try { + Item item = (Item) bitstreamService.getParentObject(context, bitstream); + if (item != null) { + String itemUiUrl = frontendUrlService.generateUrl(item); + lsets.add(new Lset(itemUiUrl, getRelation(), "text/html", buildAnchor(bitstream))); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamTypeProcessor.java new file mode 100644 index 000000000000..5c4569e4c165 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamTypeProcessor.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import java.text.MessageFormat; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Bitstream; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of {@link ItemSignPostingProcessor} for the type relation. 
+ */ +public class BitstreamTypeProcessor extends ASignPostingProcessor + implements BitstreamSignPostingProcessor { + + private static Logger log = Logger.getLogger(BitstreamTypeProcessor.class); + + @Autowired + private BitstreamService bitstreamService; + + public BitstreamTypeProcessor() { + setRelation("type"); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Bitstream bitstream, List linksets, + Linkset primaryLinkset) { + try { + String type = bitstreamService.getMetadata(bitstream, getMetadataField()); + if (StringUtils.isNotBlank(type)) { + if (StringUtils.isNotBlank(getPattern())) { + type = MessageFormat.format(getPattern(), type); + } + primaryLinkset.getType().add(new Relation(type, null)); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, + Bitstream bitstream, List lsets) { + try { + String type = bitstreamService.getMetadata(bitstream, getMetadataField()); + if (StringUtils.isNotBlank(type)) { + if (StringUtils.isNotBlank(getPattern())) { + type = MessageFormat.format(getPattern(), type); + } + lsets.add(new Lset(type, getRelation(), buildAnchor(bitstream))); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemAuthorProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemAuthorProcessor.java new file mode 100644 index 000000000000..bdaecd41f201 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemAuthorProcessor.java @@ -0,0 +1,109 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import static org.dspace.content.Item.ANY; +import static org.dspace.content.MetadataSchemaEnum.PERSON; + +import java.util.List; +import java.util.UUID; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of {@link ItemSignPostingProcessor} for the author relation. 
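+ * It resolves {@code dc.contributor.*} values that carry an authority key to the referenced person
+ * item and, when that item has a {@code person.identifier.orcid} value, links to the person's landing
+ * page using the ORCID value as the link type.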
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemAuthorProcessor extends ASignPostingProcessor + implements ItemSignPostingProcessor { + + /** log4j category */ + private static Logger log = Logger.getLogger(ItemAuthorProcessor.class); + + @Autowired + private ItemService itemService; + + @Autowired + private FrontendUrlService frontendUrlService; + + private String retrievedExternally; + + public String getRetrievedExternally() { + return retrievedExternally; + } + + public void setRetrievedExternally(String retrievedExternally) { + this.retrievedExternally = retrievedExternally; + } + + public ItemAuthorProcessor() { + setRelation("author"); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Item item, List linksets, Linkset primaryLinkset) { + try { + List authors = itemService + .getMetadata(item, MetadataSchemaEnum.DC.getName(), "contributor", ANY, ANY); + for (MetadataValue author : authors) { + if (author.getAuthority() != null) { + String authorUuid = author.getAuthority(); + Item authorItem = itemService.find(context, UUID.fromString(authorUuid)); + String authorOrcid = itemService + .getMetadataFirstValue(authorItem, PERSON.getName(), "identifier", "orcid", ANY); + if (StringUtils.isNotBlank(authorOrcid)) { + String href = frontendUrlService.generateUrl(authorItem); + primaryLinkset.getAuthor().add(new Relation(href, authorOrcid)); + } + } + } + } catch (Exception e) { + log.error("Problem to add signposting pattern", e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, + Item item, List lsets) { + try { + List authors = itemService + .getMetadata(item, MetadataSchemaEnum.DC.getName(), "contributor", ANY, ANY); + for (MetadataValue author : authors) { + if (author.getAuthority() != null) { + String authorUuid = author.getAuthority(); + Item authorItem = itemService.find(context, UUID.fromString(authorUuid)); + String authorOrcid = itemService + .getMetadataFirstValue(authorItem, PERSON.getName(), "identifier", "orcid", ANY); + if (StringUtils.isNotBlank(authorOrcid)) { + String href = frontendUrlService.generateUrl(authorItem); + lsets.add(new Lset(href, getRelation(), authorOrcid, buildAnchor(context, item))); + } + } + } + } catch (Exception e) { + log.error("Problem to add signposting pattern", e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemIdentifierProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemIdentifierProcessor.java new file mode 100644 index 000000000000..5f5f5d86b6a7 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemIdentifierProcessor.java @@ -0,0 +1,89 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import java.text.MessageFormat; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import 
org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of {@link ItemSignPostingProcessor} for the author relation. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemIdentifierProcessor extends ASignPostingProcessor + implements ItemSignPostingProcessor { + + /** + * log4j category + */ + private static Logger log = Logger.getLogger(ItemIdentifierProcessor.class); + + @Autowired + private ItemService itemService; + + public ItemIdentifierProcessor() { + setRelation("cite-as"); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Item item, List linksets, Linkset primaryLinkset) { + try { + List identifiers = itemService.getMetadataByMetadataString(item, getMetadataField()); + for (MetadataValue identifier : identifiers) { + if (identifier != null) { + String identifierValue = identifier.getValue(); + if (StringUtils.isNotBlank(identifierValue)) { + if (StringUtils.isNotBlank(getPattern())) { + identifierValue = MessageFormat.format(getPattern(), identifierValue); + } + primaryLinkset.getCiteAs().add(new Relation(identifierValue, null)); + } + } + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, + Item item, List lsets) { + try { + List identifiers = itemService.getMetadataByMetadataString(item, getMetadataField()); + for (MetadataValue identifier : identifiers) { + if (identifier != null) { + String identifierValue = identifier.getValue(); + if (StringUtils.isNotBlank(identifierValue)) { + if (StringUtils.isNotBlank(getPattern())) { + identifierValue = MessageFormat.format(getPattern(), identifierValue); + } + lsets.add(new Lset(identifierValue, getRelation(), buildAnchor(context, item))); + } + } + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLandingPageProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLandingPageProcessor.java new file mode 100644 index 000000000000..8c048a9f7c40 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLandingPageProcessor.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.model.TypedLinkRest; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of 
{@link ItemSignPostingProcessor} for the Landing Page relation. + */ +public class ItemLandingPageProcessor extends ASignPostingProcessor implements ItemSignPostingProcessor { + + private static Logger log = Logger.getLogger(ItemLandingPageProcessor.class); + + @Autowired + private FrontendUrlService frontendUrlService; + + public ItemLandingPageProcessor() { + setRelation(TypedLinkRest.Relation.LANDING_PAGE.getName()); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Item item, List linksets, Linkset primaryLinkset) { + try { + String landingPageUrl = frontendUrlService.generateUrl(item); + primaryLinkset.getLandingPage().add(new Relation(landingPageUrl, "text/html")); + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, Item item, List lsets) { + try { + String landingPageUrl = frontendUrlService.generateUrl(item); + lsets.add(new Lset(landingPageUrl, getRelation(), "text/html", buildAnchor(context, item))); + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLicenseProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLicenseProcessor.java new file mode 100644 index 000000000000..90177f8c2a93 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLicenseProcessor.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import java.text.MessageFormat; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of {@link ItemSignPostingProcessor} for the license relation. 
+ */ +public class ItemLicenseProcessor extends ASignPostingProcessor + implements ItemSignPostingProcessor { + + private static Logger log = Logger.getLogger(ItemLicenseProcessor.class); + + @Autowired + private ItemService itemService; + + public ItemLicenseProcessor() { + setRelation("license"); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Item item, List linksets, Linkset primaryLinkset) { + try { + if (StringUtils.isNotEmpty(getMetadataField())) { + String license = itemService + .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); + if (StringUtils.isNotBlank(license)) { + if (StringUtils.isNotBlank(getPattern())) { + license = MessageFormat.format(getPattern(), license); + } + primaryLinkset.getLicense().add(new Relation(license, null)); + } + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, + Item item, List lsets) { + try { + if (StringUtils.isNotEmpty(getMetadataField())) { + String license = itemService + .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); + if (StringUtils.isNotBlank(license)) { + if (StringUtils.isNotBlank(getPattern())) { + license = MessageFormat.format(getPattern(), license); + } + lsets.add(new Lset(license, getRelation(), buildAnchor(context, item))); + } + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemPublicationBundaryProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemPublicationBundaryProcessor.java new file mode 100644 index 000000000000..265b991521ae --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemPublicationBundaryProcessor.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import java.sql.SQLException; +import java.text.MessageFormat; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of {@link ItemSignPostingProcessor} for the item relation. 
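+ * It advertises every bitstream in the item's content bundle as a {@code rel="item"} link, whose
+ * target is built from the configured URL pattern (pointing at the bitstream download URL) and whose
+ * type is the bitstream's MIME type.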
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemPublicationBundaryProcessor extends ASignPostingProcessor + implements ItemSignPostingProcessor { + + /** + * log4j category + */ + private static Logger log = Logger + .getLogger(ItemPublicationBundaryProcessor.class); + + @Autowired + private ConfigurationService configurationService; + + public ItemPublicationBundaryProcessor() { + setRelation("item"); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Item item, List linksets, Linkset primaryLinkset) { + String url = configurationService.getProperty("dspace.ui.url"); + try { + for (Bundle bundle : item.getBundles(Constants.CONTENT_BUNDLE_NAME)) { + for (Bitstream bitstream : bundle.getBitstreams()) { + String mimeType = bitstream.getFormat(context).getMIMEType(); + primaryLinkset.getItem().add( + new Relation( + MessageFormat.format(getPattern(), + url, "bitstreams", bitstream.getID() + "/download"), mimeType)); + } + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, + Item item, List lsets) { + String url = configurationService.getProperty("dspace.ui.url"); + try { + for (Bundle bundle : item.getBundles(Constants.CONTENT_BUNDLE_NAME)) { + for (Bitstream bitstream : bundle.getBitstreams()) { + String mimeType = bitstream.getFormat(context).getMIMEType(); + lsets.add(new Lset(MessageFormat.format(getPattern(), + url, "bitstreams", bitstream.getID() + "/download"), + getRelation(), mimeType, buildAnchor(context, item))); + } + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemTypeProcessor.java new file mode 100644 index 000000000000..1d5ecb0a8992 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemTypeProcessor.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.relation; + +import java.text.MessageFormat; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.Relation; +import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An implementation of {@link ItemSignPostingProcessor} for the type relation. 
+ */ +public class ItemTypeProcessor extends ASignPostingProcessor + implements ItemSignPostingProcessor { + + private static Logger log = Logger.getLogger(ItemTypeProcessor.class); + + @Autowired + private ItemService itemService; + + public ItemTypeProcessor() { + setRelation("type"); + } + + @Override + public void buildRelation(Context context, HttpServletRequest request, + Item item, List linksets, Linkset primaryLinkset) { + try { + if (StringUtils.isNotBlank(getMetadataField())) { + String itemType = itemService + .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); + if (StringUtils.isNotBlank(itemType)) { + if (StringUtils.isNotBlank(getPattern())) { + itemType = MessageFormat.format(getPattern(), itemType); + } + primaryLinkset.getType().add(new Relation(itemType, null)); + } + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + + @Override + public void buildLset(Context context, HttpServletRequest request, + Item item, List lsets) { + try { + if (StringUtils.isNotEmpty(getMetadataField())) { + String itemType = itemService + .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); + if (StringUtils.isNotBlank(itemType)) { + if (StringUtils.isNotBlank(getPattern())) { + itemType = MessageFormat.format(getPattern(), itemType); + } + lsets.add(new Lset(itemType, getRelation(), buildAnchor(context, item))); + } + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java index c2136781f927..369d56481bc7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java @@ -35,6 +35,11 @@ public class ApplicationConfig { @Value("${iiif.cors.allowed-origins}") private String[] iiifCorsAllowedOrigins; + // Allowed Signposting CORS origins ("Access-Control-Allow-Origin" header) + // Can be overridden in DSpace configuration + @Value("${signposting.cors.allowed-origins}") + private String[] signpostingCorsAllowedOrigins; + // Whether to allow credentials (cookies) in CORS requests ("Access-Control-Allow-Credentials" header) // Defaults to true. Can be overridden in DSpace configuration @Value("${rest.cors.allow-credentials:true}") @@ -45,6 +50,11 @@ public class ApplicationConfig { @Value("${iiif.cors.allow-credentials:true}") private boolean iiifCorsAllowCredentials; + // Whether to allow credentials (cookies) in CORS requests ("Access-Control-Allow-Credentials" header) + // Defaults to true. Can be overridden in DSpace configuration + @Value("${signposting.cors.allow-credentials:true}") + private boolean signpostingCorsAllowCredentials; + // Configured User Interface URL (default: http://localhost:4000) @Value("${dspace.ui.url:http://localhost:4000}") private String uiURL; @@ -90,6 +100,14 @@ public String[] getIiifAllowedOriginsConfig() { return this.iiifCorsAllowedOrigins; } + /** + * Returns the signposting.cors.allowed-origins (for Signposting access) defined in DSpace configuration. + * @return allowed origins + */ + public String[] getSignpostingAllowedOriginsConfig() { + return this.signpostingCorsAllowedOrigins; + } + /** * Return whether to allow credentials (cookies) on CORS requests. This is used to set the * CORS "Access-Control-Allow-Credentials" header in Application class. 
@@ -107,4 +125,13 @@ public boolean getCorsAllowCredentials() { public boolean getIiifAllowCredentials() { return iiifCorsAllowCredentials; } + + /** + * Return whether to allow credentials (cookies) on Signposting requests. This is used to set the + * CORS "Access-Control-Allow-Credentials" header in Application class. Defaults to false. + * @return true or false + */ + public boolean getSignpostingAllowCredentials() { + return signpostingCorsAllowCredentials; + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java new file mode 100644 index 000000000000..13f578650516 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -0,0 +1,336 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.controller; + +import static org.dspace.content.MetadataSchemaEnum.PERSON; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.InputStream; +import java.text.MessageFormat; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.authority.Choices; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.dspace.content.authority.service.MetadataAuthorityService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +public class LinksetRestControllerIT extends AbstractControllerIntegrationTest { + + private static final String doiPattern = "https://doi.org/{0}"; + private static final String doi = "10.1007/978-3-642-35233-1_18"; + + private Collection collection; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + MetadataAuthorityService metadataAuthorityService; + + @Autowired + ChoiceAuthorityService choiceAuthorityService; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + context.restoreAuthSystemState(); + } + + @Test + public void findAllItemsLinksets() throws Exception { + getClient().perform(get("/signposting")) + 
.andExpect(status().isMethodNotAllowed()); + } + + @Test + public void findOneItemGenericLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID())) + .andExpect(status().isMethodNotAllowed()); + } + + @Test + public void findOneItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + } + + @Test + public void findOneItemJsonLinksetsWithBitstreams() throws Exception { + String bitstream1Content = "ThisIsSomeDummyText"; + String bitstream1MimeType = "text/plain"; + String bitstream2Content = "ThisIsSomeAlternativeDummyText"; + String bitstream2MimeType = "application/pdf"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream 1") + .withDescription("description") + .withMimeType(bitstream1MimeType) + .build(); + } + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstream2Content, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream 2") + .withDescription("description") + .withMimeType(bitstream2MimeType) + .build(); + } + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json") + .header("Accept", "application/linkset+json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + .andExpect(jsonPath("$.linkset[0].item[0].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[0].item[0].type", + Matchers.hasToString(bitstream1MimeType))) + .andExpect(jsonPath("$.linkset[0].item[1].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream2.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[0].item[1].type", + Matchers.hasToString(bitstream2MimeType))) + .andExpect(jsonPath("$.linkset[0].anchor", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].landingPage[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].landingPage[0].type", + Matchers.hasToString("text/html"))); + } + + @Test + public void findOneBitstreamJsonLinksets() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + 
context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + bitstream.getID() + "/json") + .header("Accept", "application/linkset+json")) + .andExpect(status().isNotFound()); + } + + @Test + public void findOneItemLsetLinksets() throws Exception { + String bitstream1Content = "ThisIsSomeDummyText"; + String bitstream1MimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream 1") + .withDescription("description") + .withMimeType(bitstream1MimeType) + .build(); + } + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String expectedResponse = "<" + MessageFormat.format(doiPattern, doi) + "> ; rel=\"cite-as\" ; anchor=\"" + + url + "/handle/" + item.getHandle() + "\" , <" + url + "/entities/publication/" + item.getID() + + "> ; rel=\"landing page\" ; type=\"text/html\" ; anchor=\"" + url + "/handle/" + item.getHandle() + + "\" , <" + url + "/bitstreams/" + bitstream1.getID() + "/download> ; rel=\"item\" ; " + + "type=\"text/plain\" ; anchor=\"" + url + "/handle/" + item.getHandle() + "\" ,"; + + getClient().perform(get("/signposting/linksets/" + item.getID()) + .header("Accept", "application/linkset")) + .andExpect(content().string(expectedResponse)); + } + + @Test + public void findTypedLinkForItem() throws Exception { + configurationService.setProperty("choices.plugin.dc.contributor.author", "SolrAuthorAuthority"); + configurationService.setProperty("authority.controlled.dc.contributor.author", "true"); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + String orcidValue = "orcidValue"; + + context.turnOffAuthorisationSystem(); + Item author = ItemBuilder.createItem(context, collection) + .withType("John") + .withMetadata(PERSON.getName(), "identifier", "orcid", orcidValue) + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .withAuthor("John", author.getID().toString(), Choices.CF_ACCEPTED) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/links/" + item.getID()) + .header("Accept", "application/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + 
Matchers.hasSize(4))) + .andExpect(jsonPath("$[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$[0].rel", + Matchers.hasToString("landing page"))) + .andExpect(jsonPath("$[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$[1].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream.getID() + "/download"))) + .andExpect(jsonPath("$[1].rel", + Matchers.hasToString("item"))) + .andExpect(jsonPath("$[1].type", + Matchers.hasToString("text/plain"))) + .andExpect(jsonPath("$[2].href", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + .andExpect(jsonPath("$[2].rel", + Matchers.hasToString("cite-as"))) + .andExpect(jsonPath("$[3].href", + Matchers.hasToString(url + "/entities/publication/" + author.getID()))) + .andExpect(jsonPath("$[3].rel", + Matchers.hasToString("author"))) + .andExpect(jsonPath("$[3].type", + Matchers.hasToString(orcidValue))); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForBitstream() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + String uiUrl = configurationService.getProperty("dspace.ui.url"); + String serverUrl = configurationService.getProperty("dspace.server.url"); + getClient().perform(get("/signposting/links/" + bitstream.getID()) + .header("Accept", "application/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(3))) + .andExpect(jsonPath("$[0].href", + Matchers.hasToString(uiUrl + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$[0].rel", + Matchers.hasToString("collection"))) + .andExpect(jsonPath("$[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$[1].href", + Matchers.hasToString(serverUrl + "/signposting/linksets/" + item.getID()))) + .andExpect(jsonPath("$[1].rel", + Matchers.hasToString("linkset"))) + .andExpect(jsonPath("$[1].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$[2].href", + Matchers.hasToString(serverUrl + "/signposting/linksets/" + item.getID() + "/json"))) + .andExpect(jsonPath("$[2].rel", + Matchers.hasToString("linkset"))) + .andExpect(jsonPath("$[2].type", + Matchers.hasToString("application/linkset+json"))); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + +} diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 65b1f951fad3..f4d14eb9ee09 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1668,6 +1668,7 @@ include = ${module_dir}/orcid.cfg include = ${module_dir}/rdf.cfg include = ${module_dir}/rest.cfg include = ${module_dir}/iiif.cfg +include = ${module_dir}/signposting.cfg include = ${module_dir}/solr-statistics.cfg include = 
${module_dir}/solrauthority.cfg include = ${module_dir}/researcher-profile.cfg diff --git a/dspace/config/modules/signposting.cfg b/dspace/config/modules/signposting.cfg new file mode 100644 index 000000000000..2265294981a2 --- /dev/null +++ b/dspace/config/modules/signposting.cfg @@ -0,0 +1,29 @@ +#---------------------------------------------------------------# +#------------------SIGNPOSTING CONFIGURATIONS-------------------# + +# Allowed Cross-Origin-Resource-Sharing (CORS) origins (in "Access-Control-Allow-Origin" header). +# Only these origins (client URLs) can successfully authenticate with your REST API. +# Defaults to ${dspace.ui.url} if unspecified (as the UI must have access to the REST API). +# Multiple allowed origin URLs may be comma separated. Wildcard value (*) is NOT SUPPORTED. +# (Requires reboot of servlet container, e.g. Tomcat, to reload) +signposting.cors.allowed-origins = ${dspace.ui.url} + +# Whether or not to allow credentials (e.g. cookies) sent by the client/browser in CORS +# requests (in "Access-Control-Allow-Credentials" header). +# For DSpace, we default this to "true" to support external authentication via Shibboleth (and similar). +# However, if any of the "allowed-origins" above are *not* trusted, you may choose to set this to "false" +# for additional security. Defaults to "true" if unspecified. +# (Requires reboot of servlet container, e.g. Tomcat, to reload) +signposting.cors.allow-credentials = true + +# Path where signposting controller is available +# Defaults to "signposting", which means the signposting controller would be available +# at ${dspace.server.url}/signposting +# (Requires reboot of servlet container, e.g. Tomcat, to reload) +signposting.path = signposting + +# Whether or not to enable the signposting controller +# When "true", the signposting controller is accessible on ${signposting.path} +# When "false" or commented out, signposting is disabled/inaccessible. +# (Requires reboot of servlet container, e.g. 
Tomcat, to reload) +signposting.enabled = true \ No newline at end of file diff --git a/dspace/config/spring/api/core-services.xml b/dspace/config/spring/api/core-services.xml index 212237585ea2..3ede01647c3d 100644 --- a/dspace/config/spring/api/core-services.xml +++ b/dspace/config/spring/api/core-services.xml @@ -121,6 +121,8 @@ + + diff --git a/dspace/config/spring/rest/signposting.xml b/dspace/config/spring/rest/signposting.xml new file mode 100644 index 000000000000..33e73f3b77e1 --- /dev/null +++ b/dspace/config/spring/rest/signposting.xml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From bc73e312e028962eb914cd1cecc91e85fb4524aa Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Fri, 5 May 2023 18:45:52 +0200 Subject: [PATCH 167/686] 100553: Added test for create metadata schema & field and created test for sort byFieldName --- .../MetadataFieldRestRepository.java | 19 ++- .../MetadataSchemaRestRepository.java | 6 +- .../rest/MetadataSchemaRestRepositoryIT.java | 23 +++- .../rest/MetadatafieldRestRepositoryIT.java | 114 +++++++++++++++++- 4 files changed, 149 insertions(+), 13 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index c185e8334219..eefd6331d116 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -14,6 +14,7 @@ import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Objects; import javax.servlet.http.HttpServletRequest; @@ -45,10 +46,10 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; - /** * This is the repository responsible to manage MetadataField Rest object * @@ -213,7 +214,13 @@ private DiscoverQuery createDiscoverQuery(Context context, String schemaName, St DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.addFilterQueries(filterQueries.toArray(new String[filterQueries.size()])); - discoverQuery.setSortField("fieldName_sort", DiscoverQuery.SORT_ORDER.asc); + Iterator orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Sort.Order order = orderIterator.next(); + discoverQuery.setSortField(order.getProperty() + "_sort", + order.getDirection() == Sort.Direction.ASC ? 
DiscoverQuery.SORT_ORDER.asc : + DiscoverQuery.SORT_ORDER.desc); + } discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); return discoverQuery; @@ -254,13 +261,13 @@ protected MetadataFieldRest createAndReturn(Context context) if (isBlank(metadataFieldRest.getElement())) { throw new UnprocessableEntityException("metadata element (in request body) cannot be blank"); } else if (metadataFieldRest.getElement().contains(".")) { - throw new DSpaceBadRequestException("metadata element (in request body) cannot contain dots"); + throw new UnprocessableEntityException("metadata element (in request body) cannot contain dots"); } if (isBlank(metadataFieldRest.getQualifier())) { metadataFieldRest.setQualifier(null); } else if (metadataFieldRest.getQualifier().contains(".")) { - throw new DSpaceBadRequestException("metadata qualifier (in request body) cannot contain dots"); + throw new UnprocessableEntityException("metadata qualifier (in request body) cannot contain dots"); } // create @@ -310,12 +317,12 @@ protected MetadataFieldRest put(Context context, HttpServletRequest request, Str try { metadataFieldRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataFieldRest.class); } catch (JsonProcessingException e) { - throw new UnprocessableEntityException("Cannot parse JSON in request body", e); + throw new DSpaceBadRequestException("Cannot parse JSON in request body", e); } MetadataField metadataField = metadataFieldService.find(context, id); if (metadataField == null) { - throw new ResourceNotFoundException("metadata field with id: " + id + " not found"); + throw new UnprocessableEntityException("metadata field with id: " + id + " not found"); } if (!Objects.equals(metadataFieldRest.getElement(), metadataField.getElement())) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java index c45ac9821ff6..ba49a839bbe5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java @@ -93,11 +93,11 @@ protected MetadataSchemaRest createAndReturn(Context context) // validate fields if (isBlank(metadataSchemaRest.getPrefix())) { throw new UnprocessableEntityException("metadata schema name cannot be blank"); + } else if (metadataSchemaRest.getPrefix().contains(".")) { + throw new UnprocessableEntityException("metadata schema namespace cannot contain dots"); } if (isBlank(metadataSchemaRest.getNamespace())) { throw new UnprocessableEntityException("metadata schema namespace cannot be blank"); - } else if (metadataSchemaRest.getNamespace().contains(".")) { - throw new UnprocessableEntityException("metadata schema namespace cannot contain dots"); } // create @@ -144,7 +144,7 @@ protected MetadataSchemaRest put(Context context, HttpServletRequest request, St try { metadataSchemaRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataSchemaRest.class); } catch (JsonProcessingException e) { - throw new UnprocessableEntityException("Cannot parse JSON in request body", e); + throw new DSpaceBadRequestException("Cannot parse JSON in request body", e); } MetadataSchema metadataSchema = metadataSchemaService.find(context, id); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java index 02d51fb443af..4017d5f2da63 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java @@ -88,7 +88,7 @@ public void createSuccess() throws Exception { context.turnOffAuthorisationSystem(); MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") - .build(); + .build(); context.restoreAuthSystemState(); MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); @@ -116,6 +116,27 @@ public void createSuccess() throws Exception { } } + @Test + public void createUnprocessableEntity_prefixContainingDots() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") + .build(); + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); + metadataSchemaRest.setPrefix("test.SchemaName"); + metadataSchemaRest.setNamespace(TEST_NAMESPACE); + + String authToken = getAuthToken(admin.getEmail(), password); + + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedTest() throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java index b85dade17043..da8dacb46935 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java @@ -9,6 +9,7 @@ import static com.jayway.jsonpath.JsonPath.read; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -564,6 +565,70 @@ public void findByFieldName_exactName_combinedDiscoveryQueryParams_qualifier() t .andExpect(status().isUnprocessableEntity()); } + @Test + public void findByFieldName_sortByFieldNameASC() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + "http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + .param("sort", "fieldName,ASC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + MetadataFieldMatcher.matchMetadataField(metadataField2), + 
MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField1) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + + @Test + public void findByFieldName_sortByFieldNameDESC() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + "http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + .param("sort", "fieldName,DESC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + MetadataFieldMatcher.matchMetadataField(metadataField1), + MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField2) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + @Test public void createSuccess() throws Exception { @@ -575,7 +640,8 @@ public void createSuccess() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -606,7 +672,8 @@ public void createBlankQualifier() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); Integer id = null; try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, null), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + null), nullValue()); id = read( getClient(authToken) @@ -641,7 +708,8 @@ public void create_checkAddedToIndex() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -689,6 +757,46 @@ public void createUnauthorized() throws Exception { .andExpect(status().isUnauthorized()); } + @Test + public void createUnprocessableEntity_elementContainingDots() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement("testElement.ForCreate"); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, 
metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void createUnprocessableEntity_qualifierContainingDots() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier("testQualifier.ForCreate"); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedEPersonNoAdminRights() throws Exception { From 2dfc373ad1c8e95146121e0c19f1e740879cb00b Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Sat, 29 Apr 2023 19:24:14 +0200 Subject: [PATCH 168/686] Fixed communities sub-communities and communities collections sorting --- .../CommunityCollectionLinkRepository.java | 10 +++ .../CommunitySubcommunityLinkRepository.java | 10 +++ .../CommunityCollectionLinkRepositoryIT.java | 82 +++++++++++++++++++ ...CommunitySubcommunityLinkRepositoryIT.java | 80 ++++++++++++++++++ 4 files changed, 182 insertions(+) create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java index c77dcf18dc7b..3c728d8c31b9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -31,6 +32,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -72,6 +74,14 @@ public Page getCollections(@Nullable HttpServletRequest request, discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); discoverQuery.setSortField("dc.title_sort", DiscoverQuery.SORT_ORDER.asc); + Iterator orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Order order = 
orderIterator.next(); + discoverQuery.setSortField( + order.getProperty() + "_sort", + order.getDirection().isAscending() ? DiscoverQuery.SORT_ORDER.asc : DiscoverQuery.SORT_ORDER.desc + ); + } DiscoverResult resp = searchService.search(context, scopeObject, discoverQuery); long tot = resp.getTotalSearchResults(); for (IndexableObject solrCol : resp.getIndexableObjects()) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java index c211810d11f9..135d964f3f42 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -29,6 +30,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -68,6 +70,14 @@ public Page getSubcommunities(@Nullable HttpServletRequest reques discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); discoverQuery.setSortField("dc.title_sort", DiscoverQuery.SORT_ORDER.asc); + Iterator orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Order order = orderIterator.next(); + discoverQuery.setSortField( + order.getProperty() + "_sort", + order.getDirection().isAscending() ? 
DiscoverQuery.SORT_ORDER.asc : DiscoverQuery.SORT_ORDER.desc + ); + } DiscoverResult resp = searchService.search(context, scopeObject, discoverQuery); long tot = resp.getTotalSearchResults(); for (IndexableObject solrCommunities : resp.getIndexableObjects()) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java new file mode 100644 index 000000000000..24a94a4d4bb7 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CollectionMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunityCollectionLinkRepository} + */ +public class CommunityCollectionLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Collection collection1; + Collection collection2; + Collection collection3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + collection1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + collection2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + collection3 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + + @Test + public void getCollections_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection1), + CollectionMatcher.matchCollection(collection2), + CollectionMatcher.matchCollection(collection3) + ))); + } + + @Test + public void getCollections_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection3), + CollectionMatcher.matchCollection(collection2), + CollectionMatcher.matchCollection(collection1) + ))); + } 
+ +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java new file mode 100644 index 000000000000..aa3b1c072187 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CommunityMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunitySubcommunityLinkRepository} + */ +public class CommunitySubcommunityLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Community subCommunity1; + Community subCommunity2; + Community subCommunity3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + subCommunity1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 1") + .build(); + subCommunity2 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 2") + .build(); + subCommunity3 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + + @Test + public void getSubCommunities_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity1), + CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity3) + ))); + } + + @Test + public void getSubCommunities_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity3), + CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity1) + ))); + } + +} From 0783f7ff8285a9f0701a1ae125f5d85d348b23f4 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 8 May 2023 08:37:55 +0200 Subject: [PATCH 169/686] DS-8636 - all files option fixed --- .../dspace/app/requestitem/RequestItemEmailNotifier.java | 9 ++++++--- 1 file changed, 6 
insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 61b42fd185e2..4171744c5eb2 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -167,9 +167,12 @@ static public void sendResponse(Context context, RequestItem ri, String subject, if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - email.addAttachment(bitstreamService.retrieve(context, - bitstream), bitstream.getName(), - bitstream.getFormat(context).getMIMEType()); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); + email.addAttachment(bitstreamService.retrieve(context, + bitstream), bitstream.getName(), + bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } } } From 3c37dd45193304ffccac926cfa6a05f714610b40 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 8 May 2023 08:44:49 +0200 Subject: [PATCH 170/686] ds-8636 - indentation --- .../app/requestitem/RequestItemEmailNotifier.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 4171744c5eb2..c804ac71d9e2 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -167,12 +167,12 @@ static public void sendResponse(Context context, RequestItem ri, String subject, if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace - context.turnOffAuthorisationSystem(); - email.addAttachment(bitstreamService.retrieve(context, - bitstream), bitstream.getName(), - bitstream.getFormat(context).getMIMEType()); - context.restoreAuthSystemState(); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); + email.addAttachment(bitstreamService.retrieve(context, + bitstream), bitstream.getName(), + bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } } } From 909003bfac9945d100f1d9b4a8d4be5881088376 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 8 May 2023 09:01:05 +0200 Subject: [PATCH 171/686] Fix style --- .../org/dspace/app/requestitem/RequestItemEmailNotifier.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index c804ac71d9e2..caefb4bf402c 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -167,7 +167,8 @@ static public void sendResponse(Context context, RequestItem ri, String subject, if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - // #8636 Anyone receiving the email can 
respond to the request without authenticating into DSpace + // #8636 Anyone receiving the email can respond to the + // request without authenticating into DSpace context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, bitstream), bitstream.getName(), From 6b10fc3783055bba4fbd291726d574752b22aca3 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 8 May 2023 09:15:05 +0200 Subject: [PATCH 172/686] Fix Style --- .../org/dspace/app/requestitem/RequestItemEmailNotifier.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index caefb4bf402c..8d1959754723 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -167,7 +167,7 @@ static public void sendResponse(Context context, RequestItem ri, String subject, if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - // #8636 Anyone receiving the email can respond to the + // #8636 Anyone receiving the email can respond to the // request without authenticating into DSpace context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, From 896cffda7da45b2e825b7b7c2a4a4f2c05853b25 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Mon, 8 May 2023 11:48:56 -0400 Subject: [PATCH 173/686] Get request-copy response email body from backend templates, as with all other emails. There is a considerable quantity of other changes here which were made to facilitate testing by making the affected class a bean and injecting the desired "strategy" class programmatically in the test. 
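For orientation, a minimal caller-side sketch of what this refactor implies: the notifier is now resolved as a managed bean and its methods are invoked on an instance instead of statically. The lookup-by-name call below mirrors service-manager lookups used elsewhere in this patch series, but the bean name, the wrapper class, and the method shown are illustrative assumptions rather than the actual wiring in RequestItemRepository (which may simply rely on injection).

    import java.io.IOException;

    import org.dspace.app.requestitem.RequestItem;
    import org.dspace.app.requestitem.RequestItemEmailNotifier;
    import org.dspace.core.Context;
    import org.dspace.utils.DSpace;

    /** Illustration only; not part of this change. */
    public class RequestCopyResponseSketch {
        public void respond(Context context, RequestItem request, String subject, String message)
                throws IOException {
            // Resolve the notifier from the DSpace service manager instead of
            // calling the former static methods (the bean name used here is assumed).
            RequestItemEmailNotifier notifier = new DSpace().getServiceManager()
                    .getServiceByName(RequestItemEmailNotifier.class.getName(),
                                      RequestItemEmailNotifier.class);
            // Builds the reply from the request_item.granted email template and sends it.
            notifier.sendResponse(context, request, subject, message);
        }
    }

Turning the notifier into a bean is also what allows the new RequestItemEmailNotifierTest to swap in a specific RequestItemAuthorExtractor strategy programmatically and, judging by the new JavaMailTestTransport class, to intercept outgoing mail during tests.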
--- .../requestitem/RequestItemEmailNotifier.java | 81 +++++--- .../dspace/app/requestitem/package-info.java | 15 +- ...ring-dspace-addon-requestitem-services.xml | 11 +- .../requestitem/JavaMailTestTransport.java | 65 +++++++ .../RequestItemEmailNotifierTest.java | 173 ++++++++++++++++++ .../repository/RequestItemRepository.java | 14 +- .../app/rest/utils/ApplicationConfig.java | 18 +- 7 files changed, 328 insertions(+), 49 deletions(-) create mode 100644 dspace-api/src/test/java/org/dspace/app/requestitem/JavaMailTestTransport.java create mode 100644 dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 384f33decaf2..d3813500911d 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -11,55 +11,57 @@ import java.io.IOException; import java.sql.SQLException; import java.util.List; +import javax.annotation.ManagedBean; +import javax.inject.Inject; +import javax.inject.Named; +import javax.inject.Singleton; import javax.mail.MessagingException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.dspace.app.requestitem.factory.RequestItemServiceFactory; import org.dspace.app.requestitem.service.RequestItemService; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamService; import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; -import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; /** * Send item requests and responses by email. * + *

The "strategy" by which approvers are chosen is in an implementation of + * {@link RequestItemAuthorExtractor} which is injected by the name + * {@code requestItemAuthorExtractor}. See the DI configuration documents. + * * @author Mark H. Wood */ +@Singleton +@ManagedBean public class RequestItemEmailNotifier { private static final Logger LOG = LogManager.getLogger(); - private static final BitstreamService bitstreamService - = ContentServiceFactory.getInstance().getBitstreamService(); - - private static final ConfigurationService configurationService - = DSpaceServicesFactory.getInstance().getConfigurationService(); + @Inject + protected BitstreamService bitstreamService; - private static final HandleService handleService - = HandleServiceFactory.getInstance().getHandleService(); + @Inject + protected ConfigurationService configurationService; - private static final RequestItemService requestItemService - = RequestItemServiceFactory.getInstance().getRequestItemService(); + @Inject + protected HandleService handleService; - private static final RequestItemAuthorExtractor requestItemAuthorExtractor - = DSpaceServicesFactory.getInstance() - .getServiceManager() - .getServiceByName("requestItemAuthorExtractor", - RequestItemAuthorExtractor.class); + @Inject + protected RequestItemService requestItemService; - private RequestItemEmailNotifier() {} + @Inject + @Named("requestItemAuthorExtractor") // alias for selected strategy bean + protected RequestItemAuthorExtractor requestItemAuthorExtractor; /** * Send the request to the approver(s). @@ -70,7 +72,7 @@ private RequestItemEmailNotifier() {} * @throws IOException passed through. * @throws SQLException if the message was not sent. */ - static public void sendRequest(Context context, RequestItem ri, String responseLink) + public void sendRequest(Context context, RequestItem ri, String responseLink) throws IOException, SQLException { // Who is making this request? List authors = requestItemAuthorExtractor @@ -147,12 +149,38 @@ static public void sendRequest(Context context, RequestItem ri, String responseL * @param message email body (may be empty). * @throws IOException if sending failed. */ - static public void sendResponse(Context context, RequestItem ri, String subject, + public void sendResponse(Context context, RequestItem ri, String subject, String message) throws IOException { + // Who granted this request? + List grantors; + try { + grantors = requestItemAuthorExtractor.getRequestItemAuthor(context, ri.getItem()); + } catch (SQLException e) { + LOG.warn("Failed to get grantor's name and address: {}", e.getMessage()); + grantors = List.of(); + } + + String grantorName; + String grantorAddress; + if (grantors.isEmpty()) { + grantorName = configurationService.getProperty("mail.admin.name"); + grantorAddress = configurationService.getProperty("mail.admin"); + } else { + RequestItemAuthor grantor = grantors.get(0); // XXX Cannot know which one + grantorName = grantor.getFullName(); + grantorAddress = grantor.getEmail(); + } + // Build an email back to the requester. 
- Email email = new Email(); - email.setContent("body", message); + Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), + "request_item.granted")); + email.addArgument(ri.getReqName()); // {0} requestor's name + email.addArgument(ri.getItem().getID().toString()); // {1} URL of the requested Item + email.addArgument(ri.getItem().getName()); // {2} title of the requested Item + email.addArgument(grantorName); // {3} name of the grantor + email.addArgument(grantorAddress); // {4} email of the grantor + email.addArgument(message); // {5} grantor's optional message email.setSubject(subject); email.addRecipient(ri.getReqEmail()); // Attach bitstreams. @@ -167,8 +195,9 @@ static public void sendResponse(Context context, RequestItem ri, String subject, if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - email.addAttachment(bitstreamService.retrieve(context, - bitstream), bitstream.getName(), + email.addAttachment( + bitstreamService.retrieve(context, bitstream), + bitstream.getName(), bitstream.getFormat(context).getMIMEType()); } } @@ -207,7 +236,7 @@ static public void sendResponse(Context context, RequestItem ri, String subject, * @throws IOException if the message body cannot be loaded or the message * cannot be sent. */ - static public void requestOpenAccess(Context context, RequestItem ri) + public void requestOpenAccess(Context context, RequestItem ri) throws IOException { Email message = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "request_item.admin")); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java b/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java index 5886f16fde1a..fa7c15b23060 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java @@ -12,10 +12,15 @@ * e-mailed to a responsible party for consideration and action. Find details * in the user documentation under the rubric "Request a Copy". * - *

This package includes several "strategy" classes which discover responsible - * parties in various ways. See {@link RequestItemSubmitterStrategy} and the - * classes which extend it. A strategy class must be configured and identified - * as {@link RequestItemAuthorExtractor} for injection into code which requires - * Request a Copy services. + *

Mailing is handled by {@link RequestItemEmailNotifier}. Responsible + parties are represented by {@link RequestItemAuthor}. + * + *

This package includes several "strategy" classes which discover + * responsible parties in various ways. See + * {@link RequestItemSubmitterStrategy} and the classes which extend it, and + * others which implement {@link RequestItemAuthorExtractor}. A strategy class + * must be configured and identified as {@link requestItemAuthorExtractor} + * (note capitalization) for injection into code which requires Request + * a Copy services. */ package org.dspace.app.requestitem; diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml index b9c11f8164d6..b8ce542ffcac 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml @@ -14,16 +14,17 @@ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd http://www.springframework.org/schema/context - http://www.springframework.org/schema/context/spring-context-2.5.xsd" - default-autowire-candidates="*Service,*DAO,javax.sql.DataSource"> + http://www.springframework.org/schema/context/spring-context-2.5.xsd"> - + - - + + + This sends various emails between the requestor and the grantor. + + + + - + id="org.dspace.app.requestitem.RequestItemMetadataStrategy"> + + Get recipients from an item metadata field. + - + id="org.dspace.app.requestitem.RequestItemHelpdeskStrategy"> + + HelpDesk to instead get RequestItem emails + + + id='org.dspace.app.requestitem.CollectionAdministratorsRequestItemStrategy'> - Send request emails to administrators of an Item's owning - Collection. + Send request emails to administrators of an Item's owning + Collection. + id='org.dspace.app.requestitem.CombiningRequestItemStrategy'> - Execute multiple strategies and concatenate their lists of - recipients. Mail will go to all members of the combined list. + Execute multiple strategies and concatenate their lists of + recipients. Mail will go to all members of the combined list. - A list of RequestItemAuthorExtractor beans + A list of RequestItemAuthorExtractor beans - - - - - - - - - diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java index d88e775f8bbe..492d650cab0f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java @@ -23,14 +23,18 @@ * @author Tim Donohue */ @Configuration +// Component scanning ignores any parent {@code ApplicationContext}s, so any +// bean which is in the scope of both will be duplicated. dspace-services makes +// its context the parent of this one. If a bean is explicitly configured in +// the parent, it won't be so configured in this context and you may have +// trouble. IOW be careful what you add here. 
@ComponentScan( { "org.dspace.app.rest.converter", "org.dspace.app.rest.repository", "org.dspace.app.rest.utils", "org.dspace.app.configuration", "org.dspace.iiif", - "org.dspace.app.iiif", - "org.dspace.app.requestitem" + "org.dspace.app.iiif" }) public class ApplicationConfig { // Allowed CORS origins ("Access-Control-Allow-Origin" header) diff --git a/dspace/config/spring/api/requestitem.xml b/dspace/config/spring/api/requestitem.xml index 63bbacd49bf8..e98ec07a1f41 100644 --- a/dspace/config/spring/api/requestitem.xml +++ b/dspace/config/spring/api/requestitem.xml @@ -17,9 +17,10 @@ meld the lists from two or more other strategies. - + - + This sends various emails between the requestor and the grantor. From 4e64afbe3b3f57986fe8a635db7254ada7c12cff Mon Sep 17 00:00:00 2001 From: Mohamed Saber Eskander Date: Thu, 11 May 2023 12:04:20 +0300 Subject: [PATCH 181/686] bulk access controll --- .../bulkaccesscontrol/BulkAccessControl.java | 456 ++++++++++++++++++ .../BulkAccessControlCli.java | 18 + ...lkAccessControlCliScriptConfiguration.java | 19 + .../BulkAccessControlScriptConfiguration.java | 113 +++++ .../exception/BulkAccessControlException.java | 48 ++ .../model/AccessCondition.java | 54 +++ .../model/AccessConditionBitstream.java | 60 +++ .../model/AccessConditionItem.java | 36 ++ .../model/AccessControl.java | 40 ++ .../authorize/ResourcePolicyServiceImpl.java | 9 + .../authorize/dao/ResourcePolicyDAO.java | 3 + .../dao/impl/ResourcePolicyDAOImpl.java | 13 + .../service/ResourcePolicyService.java | 3 + .../org/dspace/content/ItemServiceImpl.java | 33 +- .../dspace/content/service/ItemService.java | 19 + .../submit/model/AccessConditionOption.java | 2 +- .../config/spring/api/scripts.xml | 5 + .../BulkAccessControlIT.java | 437 +++++++++++++++++ .../app/matcher/ResourcePolicyMatcher.java | 126 +++++ .../config/spring/rest/scripts.xml | 5 + dspace/config/spring/api/scripts.xml | 5 + dspace/config/spring/rest/scripts.xml | 6 + 22 files changed, 1505 insertions(+), 5 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessControl.java create mode 100644 dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java create mode 100644 dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java new file mode 100644 index 000000000000..dc3a6da56ba4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -0,0 +1,456 @@ 
+/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED; + +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.UUID; +import java.util.function.Function; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.bulkaccesscontrol.exception.BulkAccessControlException; +import org.dspace.app.bulkaccesscontrol.model.AccessCondition; +import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream; +import org.dspace.app.bulkaccesscontrol.model.AccessControl; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService; +import org.dspace.app.util.DSpaceObjectUtilsImpl; +import org.dspace.app.util.service.DSpaceObjectUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.model.AccessConditionOption; +import org.dspace.util.MultiFormatDateParser; +import org.dspace.utils.DSpace; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Implementation of {@link DSpaceRunnable} to perform a bulk access control via json file. 
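+ *
+ * <p>
+ * For orientation, a rough sketch of the kind of JSON this runnable consumes.
+ * The property names are only inferred from the getters of the model classes
+ * added in this patch (AccessControl, AccessConditionItem,
+ * AccessConditionBitstream, AccessCondition); the real names depend on the
+ * Jackson mappings, and the available access condition names (e.g.
+ * "openaccess", "embargo") depend on the bulk access condition
+ * configuration, so treat this as an illustration rather than a schema:
+ * <pre>
+ * {
+ *   "item": {
+ *     "mode": "replace",
+ *     "accessConditions": [
+ *       { "name": "embargo", "startDate": "2024-06-24" }
+ *     ]
+ *   },
+ *   "bitstream": {
+ *     "constraint": { "uuids": [] },
+ *     "mode": "add",
+ *     "accessConditions": [
+ *       { "name": "openaccess" }
+ *     ]
+ *   }
+ * }
+ * </pre>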
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControl extends DSpaceRunnable> { + + private static final Logger LOGGER = LoggerFactory.getLogger(BulkAccessControl.class); + + private DSpaceObjectUtils dSpaceObjectUtils; + + private SearchService searchService; + + private IndexObjectFactoryFactory indexObjectServiceFactory; + + private CommunityService communityService; + + private CollectionService collectionService; + + private ConfigurationService configurationService; + + private ItemService itemService; + + private AuthorizeService authorizeService; + + private String filename; + + private String[] uuids; + + private String [] targetUuids; + + private Context context; + + private BundleService bundleService; + + private BitstreamService bitstreamService; + + private BulkAccessConditionConfigurationService bulkAccessConditionConfigurationService; + + private ResourcePolicyService resourcePolicyService; + + private Map itemAccessConditions; + + private Map uploadAccessConditions; + + @Override + @SuppressWarnings("unchecked") + public void setup() throws ParseException { + + this.searchService = SearchUtils.getSearchService(); + this.indexObjectServiceFactory = IndexObjectFactoryFactory.getInstance(); + this.communityService = ContentServiceFactory.getInstance().getCommunityService(); + this.collectionService = ContentServiceFactory.getInstance().getCollectionService(); + this.itemService = ContentServiceFactory.getInstance().getItemService(); + this.authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + this.bundleService = ContentServiceFactory.getInstance().getBundleService(); + this.bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + this.resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.bulkAccessConditionConfigurationService = new DSpace().getServiceManager().getServiceByName( + "bulkAccessConditionConfigurationService", BulkAccessConditionConfigurationService.class); + this.dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName( + DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class); + + BulkAccessConditionConfiguration bulkAccessConditionConfiguration = + bulkAccessConditionConfigurationService.getBulkAccessConditionConfiguration("default"); + + itemAccessConditions = bulkAccessConditionConfiguration + .getItemAccessConditionOptions() + .stream() + .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity())); + + uploadAccessConditions = bulkAccessConditionConfiguration + .getBitstreamAccessConditionOptions() + .stream() + .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity())); + + filename = commandLine.getOptionValue('f'); + uuids = commandLine.getOptionValues('u'); + } + + @Override + public void internalRun() throws Exception { + ObjectMapper mapper = new ObjectMapper(); + AccessControl accessControl; + context = new Context(Context.Mode.BATCH_EDIT); + assignCurrentUserInContext(); + assignSpecialGroupsInContext(); + + context.turnOffAuthorisationSystem(); + + if (uuids == null) { + handler.logError("A target uuid must be provided (run with -h flag for details)"); + throw new IllegalArgumentException("A target uuid must be provided"); + } else if (uuids.length <= 0) { + handler.logError("A target uuid must be provided with at least on uuid"); + throw new 
IllegalArgumentException("A target uuid must be provided with at least on uuid"); + } + + InputStream inputStream = handler.getFileStream(context, filename) + .orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be " + + "found for filename: " + filename)); + + try { + accessControl = mapper.readValue(inputStream, AccessControl.class); + } catch (IOException e) { + handler.logError("Error parsing json file"); + throw new IllegalArgumentException("Error parsing json file", e); + } + + try { + validate(accessControl); + updateItemsAndBitstreamsPolices(accessControl); + context.complete(); + context.restoreAuthSystemState(); + } catch (Exception e) { + handler.handleException(e); + context.abort(); + } + } + + private void validate(AccessControl accessControl) { + + if (Objects.isNull(accessControl.getItem()) && Objects.isNull(accessControl.getBitstream())) { + handler.logError("item or bitstream nodes must be provided"); + } + + if (Objects.nonNull(accessControl.getItem())) { + if (StringUtils.isNotEmpty(accessControl.getItem().getMode()) && + !(accessControl.getItem().getMode().equals("add") || + accessControl.getItem().getMode().equals("replace"))) { + handler.logError("wrong value for item mode<" + accessControl.getItem().getMode() + ">"); + } + + for (AccessCondition accessCondition : accessControl.getItem().getAccessConditions()) { + validateAccessCondition(accessCondition); + } + } + + if (Objects.nonNull(accessControl.getBitstream())) { + if (StringUtils.isNotEmpty(accessControl.getBitstream().getMode()) && + !(accessControl.getBitstream().getMode().equals("add") || + accessControl.getBitstream().getMode().equals("replace"))) { + handler.logError("wrong value for bitstream mode<" + accessControl.getBitstream().getMode() + ">"); + } + + for (AccessCondition accessCondition : accessControl.getBitstream().getAccessConditions()) { + validateAccessCondition(accessCondition); + } + } + } + + private void validateAccessCondition(AccessCondition accessCondition) { + + if (!itemAccessConditions.containsKey(accessCondition.getName())) { + handler.logError("wrong access condition <" + accessCondition.getName() + ">"); + throw new IllegalArgumentException("wrong access condition <" + accessCondition.getName() + ">"); + } + + try { + itemAccessConditions.get(accessCondition.getName()).validateResourcePolicy( + context, accessCondition.getName(), accessCondition.getStartDate(), accessCondition.getEndDate()); + } catch (Exception e) { + handler.logError("invalid access condition"); + handler.handleException(e); + } + } + + public void updateItemsAndBitstreamsPolices(AccessControl accessControl) + throws SQLException, SearchServiceException, AuthorizeException { + + int counter = 0; + int start = 0; + int limit = 20; + + String query = buildSolrQuery(uuids); + + Iterator itemIterator = findItems(query, start, limit); + + while (itemIterator.hasNext()) { + + Item item = itemIterator.next(); + + if (Objects.nonNull(accessControl.getItem())) { + updateItemPolicies(item, accessControl); + } + + if (Objects.nonNull(accessControl.getBitstream())) { + updateBitstreamsPolicies(item, accessControl); + } + + context.commit(); + context.uncacheEntity(item); + + counter++; + + if (counter == limit) { + start += limit; + itemIterator = findItems(query, start, limit); + } + } + } + + private String buildSolrQuery(String[] uuids) throws SQLException { + HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + String [] query = new String[uuids.length]; 
+ for (int i = 0 ; i < query.length ; i++) { + DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids[i])); + if (dso.getType() == Constants.COMMUNITY) { + query[i] = "location.comm:" + dso.getID(); + } else if (dso.getType() == Constants.COLLECTION) { + query[i] = "location.coll:" + dso.getID(); + } else if (dso.getType() == Constants.ITEM) { + query[i] = "search.resourceid:" + dso.getID(); + } + } + return StringUtils.joinWith(" OR ", query); + } + + private Iterator findItems(String query, int start, int limit) + throws SearchServiceException { + + DiscoverQuery discoverQuery = buildDiscoveryQuery(query, start, limit); + + return searchService.search(context, discoverQuery) + .getIndexableObjects() + .stream() + .map(indexableObject -> + ((IndexableItem) indexableObject).getIndexedObject()) + .collect(Collectors.toList()) + .iterator(); + } + + private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) { + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.setQuery(query); + discoverQuery.setStart(start); + discoverQuery.setMaxResults(limit); + + return discoverQuery; + } + + private void updateItemPolicies(Item item, AccessControl accessControl) throws SQLException, AuthorizeException { + + if ("replace".equals(accessControl.getItem().getMode())) { + removeReadPolicies(item, TYPE_CUSTOM); + removeReadPolicies(item, TYPE_INHERITED); + } + + setItemPolicies(item, accessControl); + } + + private void setItemPolicies(Item item, AccessControl accessControl) throws SQLException, AuthorizeException { + if (isAppendModeDisabled() && item.isArchived()) { + // change to add + itemService.adjustItemPolicies(context, item, item.getOwningCollection()); + } + + accessControl + .getItem() + .getAccessConditions() + .forEach(accessCondition -> createResourcePolicy(item, accessCondition, + itemAccessConditions.get(accessCondition.getName()))); + } + + private void updateBitstreamsPolicies(Item item, AccessControl accessControl) { + + if (containsConstraints(accessControl)) { + findMatchedBitstreams(item, accessControl.getBitstream().getConstraint().getUuids()) + .forEach(bitstream -> + updateBitstreamPolicies(bitstream, item, accessControl)); + } else { + item.getBundles() + .stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .forEach(bitstream -> + updateBitstreamPolicies(bitstream, item, accessControl)); + } + } + + private boolean containsConstraints(AccessControl accessControl) { + AccessConditionBitstream controlBitstream = accessControl.getBitstream(); + + return Objects.nonNull(controlBitstream) && + Objects.nonNull(controlBitstream.getConstraint()) && + isNotEmpty(controlBitstream.getConstraint().getUuids()); + } + + private List findMatchedBitstreams(Item item, List uuids) { + return item.getBundles().stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .filter(bitstream -> uuids.contains(bitstream.getID().toString())) + .collect(Collectors.toList()); + } + + private void updateBitstreamPolicies(Bitstream bitstream, Item item, AccessControl accessControl) { + + if ("replace".equals(accessControl.getBitstream().getMode())) { + removeReadPolicies(bitstream, TYPE_CUSTOM); + removeReadPolicies(bitstream, TYPE_INHERITED); + } + + try { + setBitstreamPolicies(bitstream, item, accessControl); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + + } + + private void removeReadPolicies(DSpaceObject dso, String type) { + 
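        // In "replace" mode this is invoked twice per object, once with TYPE_CUSTOM and once with
        // TYPE_INHERITED (see updateItemPolicies and updateBitstreamPolicies above), clearing the
        // existing READ policies of that type before the access conditions from the JSON are
        // applied as new READ policies.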
try { + resourcePolicyService.removePolicies(context, dso, type, Constants.READ); + } catch (SQLException | AuthorizeException e) { + throw new BulkAccessControlException(e); + } + } + + private void setBitstreamPolicies(Bitstream bitstream, Item item, AccessControl accessControl) + throws SQLException, AuthorizeException { + if (isAppendModeDisabled() && item.isArchived()) { + itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream); + } + + accessControl.getBitstream() + .getAccessConditions() + .forEach(accessCondition -> createResourcePolicy(bitstream, accessCondition, + uploadAccessConditions.get(accessCondition.getName()))); + } + + private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition, + AccessConditionOption AccessConditionOption) { + + String name = accessCondition.getName(); + String description = accessCondition.getDescription(); + Date startDate = accessCondition.getStartDate(); + Date endDate = accessCondition.getEndDate(); + + try { + AccessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + +// private void rollback() { +// try { +// context.rollback(); +// } catch (SQLException e) { +// throw new SQLRuntimeException(e); +// } +// } + + private void assignCurrentUserInContext() throws SQLException { + UUID uuid = getEpersonIdentifier(); + if (uuid != null) { + EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid); + context.setCurrentUser(ePerson); + } + } + + private void assignSpecialGroupsInContext() throws SQLException { + for (UUID uuid : handler.getSpecialGroups()) { + context.setSpecialGroup(uuid); + } + } + + private Date parseDate(String date) { + return MultiFormatDateParser.parse(date); + } + + private boolean isAppendModeDisabled() { + return !configurationService.getBooleanProperty( + "core.authorization.installitem.inheritance-read.append-mode"); + } + + @Override + @SuppressWarnings("unchecked") + public BulkAccessControlScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("bulk-access-control",BulkAccessControlScriptConfiguration.class); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java new file mode 100644 index 000000000000..054c642298d4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java @@ -0,0 +1,18 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +/** + * Extension of {@link BulkAccessControl} for CLI. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlCli extends BulkAccessControl { + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java new file mode 100644 index 000000000000..25b631c51f46 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java @@ -0,0 +1,19 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +/** + * Extension of {@link BulkAccessControlScriptConfiguration} for CLI. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlCliScriptConfiguration + extends BulkAccessControlScriptConfiguration { + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java new file mode 100644 index 000000000000..edda394c8237 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java @@ -0,0 +1,113 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.io.InputStream; +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.cli.Options; +import org.dspace.app.util.DSpaceObjectUtilsImpl; +import org.dspace.app.util.service.DSpaceObjectUtils; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.utils.DSpace; + +/** + * Script configuration for {@link BulkAccessControl}. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + * @param the {@link BulkAccessControl} type + */ +public class BulkAccessControlScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + + try { + if (Objects.isNull(commandLineParameters)) { + throw new IllegalArgumentException(); + } else if (commandLineParameters.stream() + .map(DSpaceCommandLineParameter::getName) + .noneMatch("-u"::equals)) { + throw new IllegalArgumentException(); + } else { + List dspaceObjectIDs = + commandLineParameters.stream() + .filter(parameter -> "-u".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .collect(Collectors.toList()); + + DSpaceObjectUtils dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName( + DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class); + + for (String dspaceObjectID : dspaceObjectIDs) { + + DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(dspaceObjectID)); + + if (Objects.isNull(dso)) { + throw new IllegalArgumentException(); + } + + if (!authorizeService.isAdmin(context, dso)) { + return false; + } + } + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + + return true; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("u", "target", true, "target uuids of communities/collections/items"); + options.getOption("u").setType(String.class); + options.getOption("u").setRequired(true); + + options.addOption("f", "file", true, "source json file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(true); + + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this + * BulkImportScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java new file mode 100644 index 000000000000..092611eb0654 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.exception; + +/** + * Exception for errors that occurs during the bulk access control + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlException extends RuntimeException { + + private static final long serialVersionUID = -74730626862418515L; + + /** + * Constructor with error message and cause. 
+ * + * @param message the error message + * @param cause the error cause + */ + public BulkAccessControlException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructor with error message. + * + * @param message the error message + */ + public BulkAccessControlException(String message) { + super(message); + } + + /** + * Constructor with error cause. + * + * @param cause the error cause + */ + public BulkAccessControlException(Throwable cause) { + super(cause); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java new file mode 100644 index 000000000000..40ea2d0e25f4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java @@ -0,0 +1,54 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.Date; + +/** + * Class that model the value of ACCESS_CONDITION_CELL + * of sheet BITSTREAM_METADATA of the Bulk import excel. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessCondition { + + private String name; + + private String description; + + private Date startDate; + + private Date endDate; + + public AccessCondition() { + } + + public AccessCondition(String name, String description, Date startDate, Date endDate) { + this.name = name; + this.description = description; + this.startDate = startDate; + this.endDate = endDate; + } + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + public Date getStartDate() { + return startDate; + } + + public Date getEndDate() { + return endDate; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java new file mode 100644 index 000000000000..0e5ee47d4ec7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java @@ -0,0 +1,60 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.List; + +/** + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessConditionBitstream { + + private String mode; + + private Constraint constraint; + + private List accessConditions; + + public String getMode() { + return mode; + } + + public void setMode(String mode) { + this.mode = mode; + } + + public Constraint getConstraint() { + return constraint; + } + + public void setConstraint(Constraint constraint) { + this.constraint = constraint; + } + + public List getAccessConditions() { + return accessConditions; + } + + public void setAccessConditions(List accessConditions) { + this.accessConditions = accessConditions; + } + + public class Constraint { + + private List uuids; + + public List getUuids() { + return uuids; + } + + public void setUuids(List uuids) { + this.uuids = uuids; + } + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java new file mode 100644 index 000000000000..6176cc220c52 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.List; + +/** + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessConditionItem { + + String mode; + + List accessConditions; + + public String getMode() { + return mode; + } + + public void setMode(String mode) { + this.mode = mode; + } + + public List getAccessConditions() { + return accessConditions; + } + + public void setAccessConditions(List accessConditions) { + this.accessConditions = accessConditions; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessControl.java new file mode 100644 index 000000000000..8e3c11001fa8 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessControl.java @@ -0,0 +1,40 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +public class AccessControl { + + AccessConditionItem item; + + AccessConditionBitstream bitstream; + + public AccessControl() { + } + + public AccessControl(AccessConditionItem item, + AccessConditionBitstream bitstream) { + this.item = item; + this.bitstream = bitstream; + } + + public AccessConditionItem getItem() { + return item; + } + + public void setItem(AccessConditionItem item) { + this.item = item; + } + + public AccessConditionBitstream getBitstream() { + return bitstream; + } + + public void setBitstream(AccessConditionBitstream bitstream) { + this.bitstream = bitstream; + } +} diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java index 4a2addf781b9..b762107a84c5 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java @@ -232,6 +232,15 @@ public void removePolicies(Context c, DSpaceObject o, String type) throws SQLExc c.restoreAuthSystemState(); } + @Override + public void removePolicies(Context c, DSpaceObject o, String type, int action) + throws SQLException, AuthorizeException { + resourcePolicyDAO.deleteByDsoAndTypeAndAction(c, o, type, action); + c.turnOffAuthorisationSystem(); + contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o); + c.restoreAuthSystemState(); + } + @Override public void removeDsoGroupPolicies(Context context, DSpaceObject dso, Group group) throws SQLException, AuthorizeException { diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java index 5c898a5bca61..4e12cd0bfd66 
100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java +++ b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java @@ -39,6 +39,9 @@ public List findByDsoAndType(Context context, DSpaceObject dSpac public List findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException; + public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dSpaceObject, String type, int action) + throws SQLException; + public List findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java index 651c1ad63b6d..26b6bb1d7345 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java @@ -103,6 +103,19 @@ public List findByDSoAndAction(Context context, DSpaceObject dso return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1); } + @Override + public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dso, String type, int actionId) + throws SQLException { + String queryString = "delete from ResourcePolicy where dSpaceObject.id = :dsoId " + + "AND rptype = :rptype AND actionId= :actionId"; + Query query = createQuery(context, queryString); + query.setParameter("dsoId", dso.getID()); + query.setParameter("rptype", type); + query.setParameter("actionId", actionId); + query.executeUpdate(); + + } + @Override public List findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java index 726078d74382..43735fcd6089 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java @@ -93,6 +93,9 @@ public List findByTypeGroupActionExceptId(Context context, DSpac public void removePolicies(Context c, DSpaceObject o, String type) throws SQLException, AuthorizeException; + public void removePolicies(Context c, DSpaceObject o, String type, int action) + throws SQLException, AuthorizeException; + public void removeDsoGroupPolicies(Context context, DSpaceObject dso, Group group) throws SQLException, AuthorizeException; diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index de5826fa015b..d529df9ca01d 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -940,14 +940,39 @@ public void adjustBundleBitstreamPolicies(Context context, Item item, Collection for (Bitstream bitstream : mybundle.getBitstreams()) { // if come from InstallItem: remove all submission/workflow policies - authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_SUBMISSION); - authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW); - addCustomPoliciesNotInPlace(context, bitstream, defaultItemPolicies); - addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies); + removeAllPoliciesAndAddDefault(context, 
bitstream, defaultItemPolicies, defaultCollectionPolicies); } } } + @Override + public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream) + throws SQLException, AuthorizeException { + List defaultCollectionPolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); + + List defaultItemPolicies = authorizeService.findPoliciesByDSOAndType(context, item, + ResourcePolicy.TYPE_CUSTOM); + if (defaultCollectionPolicies.size() < 1) { + throw new SQLException("Collection " + collection.getID() + + " (" + collection.getHandle() + ")" + + " has no default bitstream READ policies"); + } + + // remove all policies from bitstream, add new ones + removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, defaultCollectionPolicies); + } + + private void removeAllPoliciesAndAddDefault(Context context, Bitstream bitstream, + List defaultItemPolicies, + List defaultCollectionPolicies) + throws SQLException, AuthorizeException { + authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_SUBMISSION); + authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW); + addCustomPoliciesNotInPlace(context, bitstream, defaultItemPolicies); + addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies); + } + @Override public void adjustItemPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index b7a479469bf1..02d83808c1ff 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -507,6 +507,25 @@ public void inheritCollectionDefaultPolicies(Context context, Item item, Collect public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException; + /** + * Adjust the Bundle and Bitstream policies to reflect what have been defined + * during the submission/workflow. The temporary SUBMISSION and WORKFLOW + * policies are removed and the policies defined at the item and collection + * level are copied and inherited as appropriate. Custom selected Item policies + * are copied to the bundle/bitstream only if no explicit custom policies were + * already applied to the bundle/bitstream. 
Collection's policies are inherited + * if there are no other policies defined or if the append mode is defined by + * the configuration via the core.authorization.installitem.inheritance-read.append-mode property + * + * @param context DSpace context object + * @param item Item to adjust policies on + * @param collection Collection + * @throws SQLException If database error + * @throws AuthorizeException If authorization error + */ + public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream) + throws SQLException, AuthorizeException; + /** * Adjust the Item's policies to reflect what have been defined during the diff --git a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java index dbbb7bbc5e4d..4feaced075a9 100644 --- a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java +++ b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java @@ -184,7 +184,7 @@ public void updateResourcePolicy(Context context, ResourcePolicy resourcePolicy) * @param endDate End date of the resource policy. If {@link #getHasEndDate()} * returns false, endDate should be null. Otherwise endDate may not be null. */ - private void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) + public void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) throws SQLException, AuthorizeException, ParseException { if (getHasStartDate() && Objects.isNull(startDate)) { throw new IllegalStateException("The access condition " + getName() + " requires a start date."); diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml index 140fe0abf50c..808d22a5bf24 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml @@ -86,4 +86,9 @@ + + + + + diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java new file mode 100644 index 000000000000..b63d31f52464 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -0,0 +1,437 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static org.dspace.app.matcher.ResourcePolicyMatcher.matches; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.dspace.core.Constants.READ; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.text.SimpleDateFormat; +import java.util.Date; +import 
java.util.Iterator; +import java.util.List; +import java.util.stream.Collectors; + +import javax.validation.constraints.AssertTrue; + +import org.apache.commons.io.file.PathUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.matcher.ResourcePolicyMatcher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.GroupTest; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.dspace.matcher.DateMatcher; +import org.dspace.util.MultiFormatDateParser; +import org.dspace.utils.DSpace; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Basic integration testing for the Bulk Access conditions Feature{@link BulkAccessControl}. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessControlIT extends AbstractIntegrationTestWithDatabase { + private Path tempDir; + private String tempFilePath; + private Collection collection; + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + + @Before + @Override + public void setUp() throws Exception { + + super.setUp(); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + + context.restoreAuthSystemState(); + + tempDir = Files.createTempDirectory("bulkAccessTest"); + tempFilePath = tempDir + "/bulk-access.json"; + } + + @After + @Override + public void destroy() throws Exception { + PathUtils.deleteDirectory(tempDir); + super.destroy(); + } + + @Test + public void performBulkAccessWithoutRequiredParamTest() throws Exception { + + buildJsonFile(""); + + String[] args = new String[] {"bulk-access-control", "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithEmptyJsonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).withTitle("title").build(); + + context.restoreAuthSystemState(); + + buildJsonFile(""); + + String[] args = new 
String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithWrongModeOfItemValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"wrong\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithWrongModeOfBitstreamValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"bitstream\": {\n" + + " \"mode\": \"wrong\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithNotFoundAccessConditionNameTest() throws Exception { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"wrongAccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new 
TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + context.restoreAuthSystemState(); + } + + @Test + public void performBulkAccessWithInvalidAccessConditionDateTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"endDate\": \"2024-06-24T23:59:59.999+0000\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + String jsonTwo = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"startDate\": \"2024-06-24T23:59:59.999+0000\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonTwo); + + args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithValidJsonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Community subCommunityTwo = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community two") + .build(); + + Community subCommunityThree = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community two") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, subCommunityTwo) + .withName("collection two") + .build(); + + Collection collectionThree = CollectionBuilder.createCollection(context, subCommunityThree) + .withName("collection three") + .build(); + + ItemBuilder.createItem(context, collectionOne).build(); + + ItemBuilder.createItem(context, collectionTwo).build(); + + Item itemThree = ItemBuilder.createItem(context, collectionThree).withTitle("item three title").build(); + + Item itemFour = ItemBuilder.createItem(context, 
collectionThree).withTitle("item four title").build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24T00:00:00.000Z\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-u", collectionTwo.getID().toString(), + "-u", itemThree.getID().toString(), + "-f", tempFilePath + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + Iterator itemIteratorOne = itemService.findByCollection(context, collectionOne); + Iterator itemIteratorTwo = itemService.findByCollection(context, collectionTwo); + itemThree = context.reloadEntity(itemThree); + itemFour = context.reloadEntity(itemFour); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + +// matchItemsResourcePolicies(itemIteratorOne, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null); +// matchItemsResourcePolicies(itemIteratorTwo, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null); +// matchItemResourcePolicies(itemThree, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null); + + assertThat(itemThree.getResourcePolicies(), hasSize(2)); + assertThat(itemThree.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), + matches(READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24T00:00:00.000Z", null, null) + )); + + // just a note here is working fine + assertThat(itemThree.getResourcePolicies(), hasItem( + matches(READ, anonymousGroup, "embargo", TYPE_CUSTOM, + itemThree.getResourcePolicies().get(0).getStartDate(), null, null) + )); + + assertThat(itemFour.getResourcePolicies().size(), is(1)); + assertThat(itemFour.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + + + + + } + + private void matchItemsResourcePolicies( + Iterator itemIterator, Group group, String rpName, String rpType, String startDate, String endDate) { + while (itemIterator.hasNext()) { + Item item = itemIterator.next(); + matchItemResourcePolicies(item, group, rpName, rpType, startDate, endDate); + } + } + + private void matchItemResourcePolicies( + Item item, Group group, String rpName, String rpType, String startDate, String endDate) { + + assertThat(item.getResourcePolicies(), hasItem( + matches(READ, group, rpName, rpType, startDate, endDate, null))); + } + + private void buildJsonFile(String json) throws IOException { + File file = new File(tempDir + "/bulk-access.json"); + Path path = Paths.get(file.getAbsolutePath()); + Files.writeString(path, json, StandardCharsets.UTF_8); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java b/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java new file mode 100644 index 000000000000..26ea7dcb5a35 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java @@ -0,0 +1,126 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and 
NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.matcher; + +import static org.dspace.util.MultiFormatDateParser.parse; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +import java.util.Date; + +import org.dspace.authorize.ResourcePolicy; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +/** + * Implementation of {@link Matcher} to match a ResourcePolicy. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResourcePolicyMatcher extends TypeSafeMatcher { + + private final Matcher actionId; + + private final Matcher ePerson; + + private final Matcher group; + + private final Matcher rptype; + + private final Matcher rpName; + + private final Matcher description; + + private final Matcher startDate; + + private final Matcher endDate; + + public ResourcePolicyMatcher(Matcher actionId, Matcher ePerson, Matcher group, + Matcher rpName, Matcher rptype, Matcher startDate, + Matcher endDate, Matcher description) { + this.actionId = actionId; + this.ePerson = ePerson; + this.group = group; + this.rptype = rptype; + this.rpName = rpName; + this.description = description; + this.startDate = startDate; + this.endDate = endDate; + } + + @Override + public void describeTo(Description description) { + description.appendText("Resource policy with action id ").appendDescriptionOf(actionId) + .appendText(" and EPerson ").appendDescriptionOf(ePerson) + .appendText(" and Group ").appendDescriptionOf(group) + .appendText(" and rpType ").appendDescriptionOf(rptype) + .appendText(" and rpName ").appendDescriptionOf(rpName) + .appendText(" and description ").appendDescriptionOf(this.description) + .appendText(" and start date ").appendDescriptionOf(startDate) + .appendText(" and end date ").appendDescriptionOf(endDate); + } + + public static ResourcePolicyMatcher matches(int actionId, EPerson ePerson, String rptype) { + return new ResourcePolicyMatcher(is(actionId), is(ePerson), nullValue(Group.class), + any(String.class), is(rptype), any(Date.class), any(Date.class), any(String.class)); + } + + public static ResourcePolicyMatcher matches(int actionId, EPerson ePerson, String rpName, String rptype) { + return new ResourcePolicyMatcher(is(actionId), is(ePerson), nullValue(Group.class), + is(rpName), is(rptype), any(Date.class), any(Date.class), any(String.class)); + } + + public static ResourcePolicyMatcher matches(int actionId, Group group, String rptype) { + return new ResourcePolicyMatcher(is(actionId), nullValue(EPerson.class), is(group), + any(String.class), is(rptype), any(Date.class), any(Date.class), any(String.class)); + } + + public static ResourcePolicyMatcher matches(int actionId, Group group, String rpName, String rptype) { + return new ResourcePolicyMatcher(is(actionId), nullValue(EPerson.class), is(group), is(rpName), + is(rptype), any(Date.class), any(Date.class), any(String.class)); + } + + public static ResourcePolicyMatcher matches(int actionId, Group group, String rpName, String rptype, + String description) { + return new ResourcePolicyMatcher(is(actionId), nullValue(EPerson.class), is(group), is(rpName), + is(rptype), any(Date.class), any(Date.class), is(description)); + } + + public static ResourcePolicyMatcher matches(int actionId, Group group, String rpName, String rpType, Date startDate, + Date 
endDate, String description) { + return new ResourcePolicyMatcher(is(actionId), nullValue(EPerson.class), is(group), is(rpName), + is(rpType), is(startDate), is(endDate), is(description)); + } + + public static ResourcePolicyMatcher matches(int actionId, Group group, String rpName, String rpType, + String startDate, String endDate, String description) { + return matches(actionId, group, rpName, rpType, startDate != null ? parse(startDate) : null, + endDate != null ? parse(endDate) : null, description); + } + + @Override + protected boolean matchesSafely(ResourcePolicy resourcePolicy) { + return actionId.matches(resourcePolicy.getAction()) + && ePerson.matches(resourcePolicy.getEPerson()) + && group.matches(resourcePolicy.getGroup()) + && rptype.matches(resourcePolicy.getRpType()) + && rpName.matches(resourcePolicy.getRpName()) + && description.matches(resourcePolicy.getRpDescription()) + && startDate.matches(resourcePolicy.getStartDate()) + && endDate.matches(resourcePolicy.getEndDate()); + } + + private static Matcher any(Class clazz) { + return LambdaMatcher.matches((obj) -> true, "any value"); + } + +} diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml index fd218aa77a8d..79927833d26e 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml @@ -43,4 +43,9 @@ + + + + + diff --git a/dspace/config/spring/api/scripts.xml b/dspace/config/spring/api/scripts.xml index 19f558dab6d2..56eacbfff29c 100644 --- a/dspace/config/spring/api/scripts.xml +++ b/dspace/config/spring/api/scripts.xml @@ -86,4 +86,9 @@ + + + + + diff --git a/dspace/config/spring/rest/scripts.xml b/dspace/config/spring/rest/scripts.xml index 1748c0fb4516..eda8c579a89c 100644 --- a/dspace/config/spring/rest/scripts.xml +++ b/dspace/config/spring/rest/scripts.xml @@ -63,4 +63,10 @@ + + + + + + From 0528b4dd30876c790bd81eda12e7dcb36ac9f602 Mon Sep 17 00:00:00 2001 From: Kristof De Langhe Date: Thu, 11 May 2023 11:16:16 +0200 Subject: [PATCH 182/686] 100414: Missing search result statistics code cleanup + extra test --- .../rest/converter/SearchEventConverter.java | 31 ++++++++-- .../app/rest/model/SearchEventRest.java | 10 +-- .../app/rest/utils/DSpaceObjectResolver.java | 61 ------------------- .../app/rest/SearchEventRestRepositoryIT.java | 61 ++++++++++++++++++- 4 files changed, 88 insertions(+), 75 deletions(-) delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java index a8203e272f29..126d37ba1ace 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java @@ -7,16 +7,20 @@ */ package org.dspace.app.rest.converter; +import java.sql.SQLException; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; import javax.servlet.http.HttpServletRequest; +import org.apache.log4j.Logger; import org.dspace.app.rest.model.PageRest; import org.dspace.app.rest.model.SearchEventRest; import org.dspace.app.rest.model.SearchResultsRest; -import org.dspace.app.rest.utils.DSpaceObjectResolver; import 
org.dspace.app.rest.utils.ScopeResolver; +import org.dspace.app.util.service.DSpaceObjectUtils; import org.dspace.content.DSpaceObject; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.usage.UsageEvent; @@ -26,22 +30,37 @@ @Component public class SearchEventConverter { + /* Log4j logger */ + private static final Logger log = Logger.getLogger(SearchEventConverter.class); @Autowired private ScopeResolver scopeResolver; @Autowired - private DSpaceObjectResolver dSpaceObjectResolver; + private DSpaceObjectUtils dSpaceObjectUtils; + + private final Integer[] allowedClickedObjectTypes = + new Integer[]{Constants.COMMUNITY, Constants.COLLECTION, Constants.ITEM}; public UsageSearchEvent convert(Context context, HttpServletRequest request, SearchEventRest searchEventRest) { UsageSearchEvent usageSearchEvent = new UsageSearchEvent(UsageEvent.Action.SEARCH, request, context, null); usageSearchEvent.setQuery(searchEventRest.getQuery()); usageSearchEvent.setDsoType(searchEventRest.getDsoType()); - if (searchEventRest.getObject() != null) { - IndexableObject object = dSpaceObjectResolver.resolveObject(context, searchEventRest.getObject()); - if (object != null && object.getIndexedObject() instanceof DSpaceObject) { - usageSearchEvent.setObject((DSpaceObject) object.getIndexedObject()); + if (searchEventRest.getClickedObject() != null) { + try { + DSpaceObject clickedObject = + dSpaceObjectUtils.findDSpaceObject(context, searchEventRest.getClickedObject()); + if (clickedObject != null && + Arrays.asList(allowedClickedObjectTypes).contains(clickedObject.getType())) { + usageSearchEvent.setObject(clickedObject); + } else { + throw new IllegalArgumentException("UUID " + searchEventRest.getClickedObject() + + " was expected to resolve to a Community, Collection or Item, but didn't resolve to any"); + } + } catch (SQLException e) { + log.warn("Unable to retrieve DSpace Object with ID " + searchEventRest.getClickedObject() + + " from the database", e); } } if (searchEventRest.getScope() != null) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java index 637acb9bfd68..46827711f2ea 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java @@ -25,7 +25,7 @@ public class SearchEventRest extends BaseObjectRest { private UUID scope; private String configuration; private String dsoType; - private UUID object; + private UUID clickedObject; private List appliedFilters; private SearchResultsRest.Sorting sort; private PageRest page; @@ -99,11 +99,11 @@ public void setDsoType(String dsoType) { this.dsoType = dsoType; } - public UUID getObject() { - return object; + public UUID getClickedObject() { + return clickedObject; } - public void setObject(UUID object) { - this.object = object; + public void setClickedObject(UUID clickedObject) { + this.clickedObject = clickedObject; } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java deleted file mode 100644 index 7ded06bdf52a..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java +++ /dev/null @@ -1,61 +0,0 @@ -/** - * The contents of this file are subject to the 
license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.utils; - -import java.sql.SQLException; -import java.util.UUID; - -import org.apache.log4j.Logger; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.dspace.discovery.IndexableObject; -import org.dspace.discovery.indexobject.IndexableCollection; -import org.dspace.discovery.indexobject.IndexableCommunity; -import org.dspace.discovery.indexobject.IndexableItem; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -@Component -public class DSpaceObjectResolver { - /* Log4j logger */ - private static final Logger log = Logger.getLogger(DSpaceObjectResolver.class); - - @Autowired - ItemService itemService; - - @Autowired - CollectionService collectionService; - - @Autowired - CommunityService communityService; - - public IndexableObject resolveObject(Context context, UUID uuid) { - IndexableObject object = null; - if (uuid != null) { - try { - object = new IndexableCommunity(communityService.find(context, uuid)); - if (object.getIndexedObject() == null) { - object = new IndexableCollection(collectionService.find(context, uuid)); - } - if (object.getIndexedObject() == null) { - object = new IndexableItem(itemService.find(context, uuid)); - } - if (object.getIndexedObject() == null) { - throw new IllegalArgumentException("UUID " + uuid + " is expected to resolve to a Community, " + - "Collection or Item, but didn't resolve to any"); - } - } catch (SQLException e) { - log.warn("Unable to retrieve DSpace Object with ID " + uuid + " from the database", e); - } - } - return object; - } - -} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java index 63ca4b735dae..978d8feb58b9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java @@ -413,7 +413,7 @@ public void postTestSuccesEmptyQuery() throws Exception { } @Test - public void postTestWithObjectSuccess() throws Exception { + public void postTestWithClickedObjectSuccess() throws Exception { context.turnOffAuthorisationSystem(); @@ -441,10 +441,10 @@ public void postTestWithObjectSuccess() throws Exception { SearchEventRest searchEventRest = new SearchEventRest(); searchEventRest.setQuery("test"); - searchEventRest.setScope(publicItem1.getID()); + searchEventRest.setScope(col1.getID()); searchEventRest.setConfiguration("default"); searchEventRest.setDsoType("item"); - searchEventRest.setObject(publicItem1.getID()); + searchEventRest.setClickedObject(publicItem1.getID()); SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); searchEventRest.setSort(sort); @@ -466,4 +466,59 @@ public void postTestWithObjectSuccess() throws Exception { .andExpect(status().isCreated()); } + + @Test + public void postTestWithClickedObjectNotExisting() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. 
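        // The search event posted further down references a random UUID as its clickedObject;
        // since that UUID resolves to no Community, Collection or Item, the converter rejects it
        // and the endpoint is expected to answer with 400 Bad Request.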
+ parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(col1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setClickedObject(UUID.randomUUID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isBadRequest()); + + } } From 9e013ef97f40d0840435c289133bc4c51e8eeeb4 Mon Sep 17 00:00:00 2001 From: Mohamed Saber Eskander Date: Thu, 11 May 2023 14:10:34 +0300 Subject: [PATCH 183/686] refactoring BulkAccessControl --- .../bulkaccesscontrol/BulkAccessControl.java | 191 ++++++++++-------- .../model/AccessConditionBitstream.java | 4 + .../model/AccessConditionItem.java | 4 + 3 files changed, 116 insertions(+), 83 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index dc3a6da56ba4..af9d4e6c1f0e 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -25,10 +25,12 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.cli.ParseException; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.app.bulkaccesscontrol.exception.BulkAccessControlException; import org.dspace.app.bulkaccesscontrol.model.AccessCondition; import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream; +import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem; import org.dspace.app.bulkaccesscontrol.model.AccessControl; import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService; @@ -36,16 +38,11 @@ import org.dspace.app.util.service.DSpaceObjectUtils; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; import 
org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.Bitstream; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.BitstreamService; -import org.dspace.content.service.BundleService; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; @@ -54,16 +51,12 @@ import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SearchUtils; import org.dspace.discovery.indexobject.IndexableItem; -import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.handle.factory.HandleServiceFactory; -import org.dspace.handle.service.HandleService; import org.dspace.scripts.DSpaceRunnable; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.submit.model.AccessConditionOption; -import org.dspace.util.MultiFormatDateParser; import org.dspace.utils.DSpace; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -82,30 +75,16 @@ public class BulkAccessControl extends DSpaceRunnable uploadAccessConditions; + private final String ADD_MODE = "add"; + + private final String REPLACE_MODE = "replace"; + @Override @SuppressWarnings("unchecked") public void setup() throws ParseException { this.searchService = SearchUtils.getSearchService(); - this.indexObjectServiceFactory = IndexObjectFactoryFactory.getInstance(); - this.communityService = ContentServiceFactory.getInstance().getCommunityService(); - this.collectionService = ContentServiceFactory.getInstance().getCollectionService(); this.itemService = ContentServiceFactory.getInstance().getItemService(); - this.authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - this.bundleService = ContentServiceFactory.getInstance().getBundleService(); - this.bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); this.resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); this.bulkAccessConditionConfigurationService = new DSpace().getServiceManager().getServiceByName( @@ -190,33 +167,80 @@ public void internalRun() throws Exception { } } - private void validate(AccessControl accessControl) { + private void validate(AccessControl accessControl) throws SQLException { - if (Objects.isNull(accessControl.getItem()) && Objects.isNull(accessControl.getBitstream())) { - handler.logError("item or bitstream nodes must be provided"); + AccessConditionItem item = accessControl.getItem(); + AccessConditionBitstream bitstream = accessControl.getBitstream(); + + if (Objects.isNull(item) && Objects.isNull(bitstream)) { + handler.logError("item or bitstream node must be provided"); + throw new BulkAccessControlException("item or bitstream node must be provided"); } - if (Objects.nonNull(accessControl.getItem())) { - if (StringUtils.isNotEmpty(accessControl.getItem().getMode()) && - !(accessControl.getItem().getMode().equals("add") || - accessControl.getItem().getMode().equals("replace"))) { - handler.logError("wrong value for item mode<" + accessControl.getItem().getMode() + ">"); - } + if 
(Objects.nonNull(item)) { + validateItemNode(item); + } - for (AccessCondition accessCondition : accessControl.getItem().getAccessConditions()) { - validateAccessCondition(accessCondition); - } + if (Objects.nonNull(bitstream)) { + validateBitstreamNode(bitstream); } + } - if (Objects.nonNull(accessControl.getBitstream())) { - if (StringUtils.isNotEmpty(accessControl.getBitstream().getMode()) && - !(accessControl.getBitstream().getMode().equals("add") || - accessControl.getBitstream().getMode().equals("replace"))) { - handler.logError("wrong value for bitstream mode<" + accessControl.getBitstream().getMode() + ">"); - } + private void validateItemNode(AccessConditionItem item) { + String mode = item.getMode(); + List accessConditions = item.getAccessConditions(); + + if (StringUtils.isEmpty(mode)) { + handler.logError("item mode node must be provided"); + throw new BulkAccessControlException("item mode node must be provided"); + } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) { + handler.logError("wrong value for item mode<" + mode + ">"); + throw new BulkAccessControlException("wrong value for item mode<" + mode + ">"); + } else if (ADD_MODE.equals(mode) && CollectionUtils.isEmpty(accessConditions)) { + handler.logError("accessConditions of item must be provided with mode<" + ADD_MODE + ">"); + throw new BulkAccessControlException( + "accessConditions of item must be provided with mode<" + ADD_MODE + ">"); + } + + for (AccessCondition accessCondition : accessConditions) { + validateAccessCondition(accessCondition); + } + } + + private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQLException { + String mode = bitstream.getMode(); + List accessConditions = bitstream.getAccessConditions(); + + if (StringUtils.isEmpty(mode)) { + handler.logError("bitstream mode node must be provided"); + throw new BulkAccessControlException("bitstream mode node must be provided"); + } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) { + handler.logError("wrong value for bitstream mode<" + mode + ">"); + throw new BulkAccessControlException("wrong value for bitstream mode<" + mode + ">"); + } else if (ADD_MODE.equals(mode) && CollectionUtils.isEmpty(accessConditions)) { + handler.logError("accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); + throw new BulkAccessControlException( + "accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); + } + + validateConstraint(bitstream); + + for (AccessCondition accessCondition : bitstream.getAccessConditions()) { + validateAccessCondition(accessCondition); + } + } + + private void validateConstraint(AccessConditionBitstream bitstream) throws SQLException { + if (uuids.length > 1 && containsConstraints(bitstream)) { + handler.logError("constraint isn't supported when multiple uuids are provided"); + throw new BulkAccessControlException("constraint isn't supported when multiple uuids are provided"); + } else { + DSpaceObject dso = + dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids[0])); - for (AccessCondition accessCondition : accessControl.getBitstream().getAccessConditions()) { - validateAccessCondition(accessCondition); + if (Objects.nonNull(dso) && dso.getType() != Constants.ITEM) { + handler.logError("constraint is not supported when uuid isn't an Item"); + throw new BulkAccessControlException("constraint is not supported when uuid isn't an Item"); } } } @@ -225,14 +249,14 @@ private void validateAccessCondition(AccessCondition accessCondition) 
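(Aside, illustrative only and not part of this patch: validateItemNode and validateBitstreamNode above enforce the same mode rule; a minimal sketch of that shared check, reusing only names the class already declares, namely handler, ADD_MODE, REPLACE_MODE, BulkAccessControlException, StringUtils and CollectionUtils.)

    // Sketch only, not part of the patch: the mode rule common to the item and bitstream nodes.
    private void validateMode(String node, String mode, List<AccessCondition> accessConditions) {
        // mode is mandatory for both the item node and the bitstream node
        if (StringUtils.isEmpty(mode)) {
            handler.logError(node + " mode node must be provided");
            throw new BulkAccessControlException(node + " mode node must be provided");
        } else if (!StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE)) {
            // only "add" and "replace" are accepted values
            handler.logError("wrong value for " + node + " mode<" + mode + ">");
            throw new BulkAccessControlException("wrong value for " + node + " mode<" + mode + ">");
        } else if (ADD_MODE.equals(mode) && CollectionUtils.isEmpty(accessConditions)) {
            // with "add", at least one access condition is required
            handler.logError("accessConditions of " + node + " must be provided with mode<" + ADD_MODE + ">");
            throw new BulkAccessControlException(
                "accessConditions of " + node + " must be provided with mode<" + ADD_MODE + ">");
        }
    }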
{ if (!itemAccessConditions.containsKey(accessCondition.getName())) { handler.logError("wrong access condition <" + accessCondition.getName() + ">"); - throw new IllegalArgumentException("wrong access condition <" + accessCondition.getName() + ">"); + throw new BulkAccessControlException("wrong access condition <" + accessCondition.getName() + ">"); } try { itemAccessConditions.get(accessCondition.getName()).validateResourcePolicy( context, accessCondition.getName(), accessCondition.getStartDate(), accessCondition.getEndDate()); } catch (Exception e) { - handler.logError("invalid access condition"); + handler.logError("invalid access condition" + e.getMessage()); handler.handleException(e); } } @@ -262,10 +286,10 @@ public void updateItemsAndBitstreamsPolices(AccessControl accessControl) context.commit(); context.uncacheEntity(item); - counter++; if (counter == limit) { + counter = 0; start += limit; itemIterator = findItems(query, start, limit); } @@ -273,10 +297,11 @@ public void updateItemsAndBitstreamsPolices(AccessControl accessControl) } private String buildSolrQuery(String[] uuids) throws SQLException { - HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); String [] query = new String[uuids.length]; + for (int i = 0 ; i < query.length ; i++) { DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids[i])); + if (dso.getType() == Constants.COMMUNITY) { query[i] = "location.comm:" + dso.getID(); } else if (dso.getType() == Constants.COLLECTION) { @@ -314,7 +339,7 @@ private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) { private void updateItemPolicies(Item item, AccessControl accessControl) throws SQLException, AuthorizeException { - if ("replace".equals(accessControl.getItem().getMode())) { + if (REPLACE_MODE.equals(accessControl.getItem().getMode())) { removeReadPolicies(item, TYPE_CUSTOM); removeReadPolicies(item, TYPE_INHERITED); } @@ -323,8 +348,10 @@ private void updateItemPolicies(Item item, AccessControl accessControl) throws S } private void setItemPolicies(Item item, AccessControl accessControl) throws SQLException, AuthorizeException { - if (isAppendModeDisabled() && item.isArchived()) { - // change to add + + AccessConditionItem itemControl = accessControl.getItem(); + + if (isAdjustPoliciesNeeded(item, itemControl.getMode(), itemControl.getAccessConditions())) { itemService.adjustItemPolicies(context, item, item.getOwningCollection()); } @@ -335,27 +362,27 @@ private void setItemPolicies(Item item, AccessControl accessControl) throws SQLE itemAccessConditions.get(accessCondition.getName()))); } + private boolean isAdjustPoliciesNeeded(Item item, String mode, List accessConditions) { + return (isAppendModeDisabled() && item.isArchived()) || + (REPLACE_MODE.equals(mode) && CollectionUtils.isEmpty(accessConditions)); + } + private void updateBitstreamsPolicies(Item item, AccessControl accessControl) { - if (containsConstraints(accessControl)) { + if (containsConstraints(accessControl.getBitstream())) { findMatchedBitstreams(item, accessControl.getBitstream().getConstraint().getUuids()) - .forEach(bitstream -> - updateBitstreamPolicies(bitstream, item, accessControl)); + .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); } else { - item.getBundles() - .stream() - .flatMap(bundle -> bundle.getBitstreams().stream()) + findAllBitstreams(item) .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); } } - private boolean 
containsConstraints(AccessControl accessControl) { - AccessConditionBitstream controlBitstream = accessControl.getBitstream(); - - return Objects.nonNull(controlBitstream) && - Objects.nonNull(controlBitstream.getConstraint()) && - isNotEmpty(controlBitstream.getConstraint().getUuids()); + private boolean containsConstraints(AccessConditionBitstream bitstream) { + return Objects.nonNull(bitstream) && + Objects.nonNull(bitstream.getConstraint()) && + isNotEmpty(bitstream.getConstraint().getUuids()); } private List findMatchedBitstreams(Item item, List uuids) { @@ -365,9 +392,16 @@ private List findMatchedBitstreams(Item item, List uuids) { .collect(Collectors.toList()); } + private List findAllBitstreams(Item item) { + return item.getBundles() + .stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .collect(Collectors.toList()); + } + private void updateBitstreamPolicies(Bitstream bitstream, Item item, AccessControl accessControl) { - if ("replace".equals(accessControl.getBitstream().getMode())) { + if (REPLACE_MODE.equals(accessControl.getBitstream().getMode())) { removeReadPolicies(bitstream, TYPE_CUSTOM); removeReadPolicies(bitstream, TYPE_INHERITED); } @@ -390,7 +424,10 @@ private void removeReadPolicies(DSpaceObject dso, String type) { private void setBitstreamPolicies(Bitstream bitstream, Item item, AccessControl accessControl) throws SQLException, AuthorizeException { - if (isAppendModeDisabled() && item.isArchived()) { + + AccessConditionBitstream bitstreamControl = accessControl.getBitstream(); + + if (isAdjustPoliciesNeeded(item, bitstreamControl.getMode(), bitstreamControl.getAccessConditions())) { itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream); } @@ -411,18 +448,10 @@ private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondit try { AccessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate); } catch (Exception e) { - throw new RuntimeException(e); + throw new BulkAccessControlException(e); } } -// private void rollback() { -// try { -// context.rollback(); -// } catch (SQLException e) { -// throw new SQLRuntimeException(e); -// } -// } - private void assignCurrentUserInContext() throws SQLException { UUID uuid = getEpersonIdentifier(); if (uuid != null) { @@ -437,10 +466,6 @@ private void assignSpecialGroupsInContext() throws SQLException { } } - private Date parseDate(String date) { - return MultiFormatDateParser.parse(date); - } - private boolean isAppendModeDisabled() { return !configurationService.getBooleanProperty( "core.authorization.installitem.inheritance-read.append-mode"); diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java index 0e5ee47d4ec7..f9f5cbce8de8 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java @@ -7,6 +7,7 @@ */ package org.dspace.app.bulkaccesscontrol.model; +import java.util.ArrayList; import java.util.List; /** @@ -37,6 +38,9 @@ public void setConstraint(Constraint constraint) { } public List getAccessConditions() { + if (accessConditions == null) { + return new ArrayList<>(); + } return accessConditions; } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java 
b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java index 6176cc220c52..10f17c1c01bf 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java @@ -7,6 +7,7 @@ */ package org.dspace.app.bulkaccesscontrol.model; +import java.util.ArrayList; import java.util.List; /** @@ -27,6 +28,9 @@ public void setMode(String mode) { } public List getAccessConditions() { + if (accessConditions == null) { + return new ArrayList<>(); + } return accessConditions; } From c670251a68433cfaecfa73f65665b9892aef22fe Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Thu, 11 May 2023 17:13:13 +0200 Subject: [PATCH 184/686] 94299: Fix minor issues --- .../rest/BitstreamCategoryRestController.java | 1 - .../operation/BitstreamRemoveOperation.java | 15 +- .../app/rest/BitstreamRestRepositoryIT.java | 299 ++++++++++++++++++ 3 files changed, 313 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java index 13929e5a9a73..6d970eb10961 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java @@ -51,7 +51,6 @@ public class BitstreamCategoryRestController { * @throws SQLException if an error occurs while accessing the database. * @throws AuthorizeException if the user is not authorized to perform the requested operation. */ - @PreAuthorize("hasAuthority('ADMIN')") @RequestMapping(method = RequestMethod.PATCH) public ResponseEntity> patch(HttpServletRequest request, @RequestBody(required = true) JsonNode jsonNode) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java index 93c495a30233..77336002716f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java @@ -14,10 +14,13 @@ import org.dspace.app.rest.exception.RESTBitstreamNotFoundException; import org.dspace.app.rest.model.patch.Operation; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.service.BitstreamService; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.AccessDeniedException; import org.springframework.stereotype.Component; /** @@ -38,6 +41,8 @@ public class BitstreamRemoveOperation extends PatchOperation { @Autowired BitstreamService bitstreamService; + @Autowired + AuthorizeService authorizeService; public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; @Override @@ -47,10 +52,10 @@ public Bitstream perform(Context context, Bitstream resource, Operation operatio if (bitstreamToDelete == null) { throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete); } + authorizeBitstreamRemoveAction(context, bitstreamToDelete, Constants.DELETE); try { 
bitstreamService.delete(context, bitstreamToDelete); - bitstreamService.update(context, bitstreamToDelete); } catch (AuthorizeException | IOException e) { throw new RuntimeException(e.getMessage(), e); } @@ -62,4 +67,12 @@ public boolean supports(Object objectToMatch, Operation operation) { return objectToMatch == null && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) && operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE); } + + public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation) throws SQLException { + try { + authorizeService.authorizeAction(context, bitstream, operation); + } catch (AuthorizeException e) { + throw new AccessDeniedException("The current user is not allowed to remove the bitstream", e); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index 3b01b4eac2ab..2d855a06c238 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -46,6 +46,7 @@ import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Bitstream; @@ -56,6 +57,8 @@ import org.dspace.content.Item; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; @@ -86,6 +89,12 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest @Autowired private ItemService itemService; + @Autowired + CollectionService collectionService; + + @Autowired + CommunityService communityService; + @Test public void findAllTest() throws Exception { //We turn off the authorization system in order to create the structure as defined below @@ -2490,6 +2499,296 @@ public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception { Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); } + @Test + public void deleteBitstreamsInBulk_Unauthorized() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + 
.withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient().perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnauthorized()); + } + + @Test + public void deleteBitstreamsInBulk_Forbidden() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + } + + @Test + public void deleteBitstreamsInBulk_collectionAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = 
CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson col1Admin = EPersonBuilder.createEPerson(context) + .withEmail("col1admin@test.com") + .withPassword(password) + .build(); + EPerson col2Admin = EPersonBuilder.createEPerson(context) + .withEmail("col2admin@test.com") + .withPassword(password) + .build(); + Group col1_AdminGroup = collectionService.createAdministrators(context, col1); + Group col2_AdminGroup = collectionService.createAdministrators(context, col2); + groupService.addMember(context, col1_AdminGroup, col1Admin); + groupService.addMember(context, col2_AdminGroup, col2Admin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(col1Admin.getEmail(), password); + // Should return forbidden since one of the bitstreams does not originate form collection 1 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(2); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Change the token to the admin of collection 2 + token = getAuthToken(col2Admin.getEmail(), password); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + ops = new ArrayList<>(); + removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp1); + removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp2); + removeOp3 = new 
RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream4.getID()); + ops.add(removeOp3); + patchBody = getPatchContent(ops); + + // Should return forbidden since one of the bitstreams does not originate form collection 2 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(0); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + @Test + public void deleteBitstreamsInBulk_communityAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson parentCommunityAdmin = EPersonBuilder.createEPerson(context) + .withEmail("parentComAdmin@test.com") + .withPassword(password) + .build(); + Group parentComAdminGroup = communityService.createAdministrators(context, parentCommunity); + groupService.addMember(context, parentComAdminGroup, parentCommunityAdmin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(parentCommunityAdmin.getEmail(), password); + // Bitstreams originate from two different collections, but those collections live in the same community, so + // a community admin should be able to delete them + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + 
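                // (Illustrative only, not part of this patch; UUIDs are placeholders.)
                // getPatchContent(ops) yields a JSON Patch (RFC 6902) array with one "remove" entry per
                // bitstream, sent to PATCH /api/core/bitstreams as application/json-patch+json, e.g.:
                // [ { "op": "remove", "path": "/bitstreams/<first-bitstream-uuid>" },
                //   { "op": "remove", "path": "/bitstreams/<second-bitstream-uuid>" } ]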
.andExpect(status().isNoContent()); + } + public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception { for (Bitstream bitstream : bitstreams) { if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) From a026a8ced06ba5ec4865d0e46c5dc0f18d20306a Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Thu, 11 May 2023 11:43:54 -0400 Subject: [PATCH 185/686] Fix test broken by new injection plan. --- .../eperson/DeleteEPersonSubmitterIT.java | 31 +++++++++++-------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java index e020c04b1a25..d1679ae1d20b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java @@ -24,9 +24,11 @@ import java.util.concurrent.atomic.AtomicReference; import javax.ws.rs.core.MediaType; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.requestitem.RequestItemAuthor; import org.dspace.app.requestitem.RequestItemAuthorExtractor; +import org.dspace.app.requestitem.RequestItemHelpdeskStrategy; import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; @@ -46,6 +48,7 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; +import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.versioning.Version; import org.dspace.versioning.factory.VersionServiceFactory; @@ -76,7 +79,7 @@ public class DeleteEPersonSubmitterIT extends AbstractControllerIntegrationTest protected RequestItemAuthorExtractor requestItemAuthorExtractor = DSpaceServicesFactory.getInstance() .getServiceManager() - .getServiceByName("org.dspace.app.requestitem.RequestItemAuthorExtractor", + .getServiceByName(RequestItemHelpdeskStrategy.class.getName(), RequestItemAuthorExtractor.class); @@ -85,15 +88,8 @@ public class DeleteEPersonSubmitterIT extends AbstractControllerIntegrationTest private EPerson submitterForVersion2; private EPerson workflowUser; - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DeleteEPersonSubmitterIT.class); + private static final Logger log = LogManager.getLogger(); - /** - * This method will be run before every test as per @Before. It will - * initialize resources required for the tests. - * - * Other methods can be annotated with @Before here or in subclasses but no - * execution order is guaranteed - */ @Before @Override public void setUp() throws Exception { @@ -114,8 +110,8 @@ public void setUp() throws Exception { /** - * This test verifies that when the submitter Eperson is deleted, the delete succeeds and the item will have - * 'null' as submitter + * This test verifies that when the submitter Eperson is deleted, the delete + * succeeds and the item will have 'null' as submitter. * * @throws Exception */ @@ -140,12 +136,21 @@ public void testArchivedItemSubmitterDelete() throws Exception { assertNull(retrieveItemSubmitter(installItem.getID())); + // Don't depend on external configuration; set up helpdesk as needed. 
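        // (Descriptive note, not part of this patch.) With request.item.helpdesk.override set to true,
        // the helpdesk-based RequestItemAuthorExtractor looked up above by
        // RequestItemHelpdeskStrategy.class.getName() answers with the configured mail.helpdesk and
        // mail.helpdesk.name values, which is what the assertions below check.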
+ final String HELPDESK_EMAIL = "dspace-help@example.com"; + final String HELPDESK_NAME = "Help Desk"; + ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + configurationService.setProperty("mail.helpdesk", HELPDESK_EMAIL); + configurationService.setProperty("mail.helpdesk.name", HELPDESK_NAME); + configurationService.setProperty("request.item.helpdesk.override", "true"); + // Test it. Item item = itemService.find(context, installItem.getID()); List requestItemAuthor = requestItemAuthorExtractor.getRequestItemAuthor(context, item); - assertEquals("Help Desk", requestItemAuthor.get(0).getFullName()); - assertEquals("dspace-help@myu.edu", requestItemAuthor.get(0).getEmail()); + assertEquals(HELPDESK_NAME, requestItemAuthor.get(0).getFullName()); + assertEquals(HELPDESK_EMAIL, requestItemAuthor.get(0).getEmail()); } /** From 7c7824f913c5db9cffafb5afed8394cc441d5c8c Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Thu, 11 May 2023 17:57:45 +0200 Subject: [PATCH 186/686] Implement community feedback --- .../DiscoveryConfigurationService.java | 3 +- .../DiscoveryScopeBasedRestControllerIT.java | 52 +++++++++++++++++++ 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index da23b87a3594..1a1ed95a2965 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -13,6 +13,7 @@ import java.util.List; import java.util.Map; import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; @@ -42,7 +43,7 @@ public class DiscoveryConfigurationService { * own configuration, we take the one of the first parent that does. * This cache ensures we do not have to go up the hierarchy every time. */ - private final Map comColToDiscoveryConfigurationMap = new HashMap<>(); + private final Map comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>(); public Map getMap() { return map; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java index 0c8735545e8b..a3408a7736df 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -58,6 +58,9 @@ * * The tests will verify that for each object, the correct facets are provided and that all the necessary fields to * power these facets are indexed properly. + * + * This file requires the discovery configuration in the following test file: + * src/test/data/dspaceFolder/config/spring/api/test-discovery.xml */ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { @@ -263,6 +266,9 @@ public void setUp() throws Exception { } @Test + /** + * Verify that the custom configuration "discovery-parent-community-1" is correctly used for Parent Community 1. 
+ */ public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID()))) @@ -301,6 +307,9 @@ public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { } @Test + /** + * Verify that the custom configuration "discovery-sub-community-1-1" is correctly used for Subcommunity 11. + */ public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID()))) @@ -330,6 +339,9 @@ public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { } @Test + /** + * Verify that the custom configuration "discovery-collection-1-1-1" is correctly used for Collection 111. + */ public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID()))) @@ -357,6 +369,10 @@ public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { } @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-1-1" is inherited + * correctly for Collection 112. + */ public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID()))) @@ -382,6 +398,10 @@ public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { } @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Subcommunity 12. + */ public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID()))) @@ -411,6 +431,9 @@ public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { } @Test + /** + * Verify that the custom configuration "discovery-collection-1-2-1" is correctly used for Collection 121. + */ public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection121.getID()))) @@ -436,6 +459,10 @@ public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { } @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Collection 122. + */ public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID()))) @@ -463,6 +490,10 @@ public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { } @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Parent Community 2. + */ public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID()))) @@ -481,6 +512,9 @@ public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { } @Test + /** + * Verify that the custom configuration "discovery-sub-community-2-1" is correctly used for Subcommunity 21. 
+ */ public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID()))) @@ -510,6 +544,9 @@ public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { } @Test + /** + * Verify that the custom configuration "discovery-collection-2-1-1" is correctly used for Collection 211. + */ public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID()))) @@ -537,6 +574,10 @@ public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { } @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-2-1" is inherited + * correctly for Collection 212. + */ public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID()))) @@ -562,6 +603,10 @@ public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { } @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Subcommunity 22. + */ public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID()))) @@ -579,6 +624,9 @@ public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { } @Test + /** + * Verify that the custom configuration "discovery-collection-2-2-1" is correctly used for Collection 221. + */ public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID()))) @@ -604,6 +652,10 @@ public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { } @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Collection 222. 
+ */ public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID()))) From 0231af5867eeaa2e9a17b27935e9ca505df5e88c Mon Sep 17 00:00:00 2001 From: damian Date: Thu, 11 May 2023 20:08:32 +0200 Subject: [PATCH 187/686] Item counters added to Communities and Collections REST API --- .../main/java/org/dspace/content/Collection.java | 9 +++++++++ .../org/dspace/content/CollectionServiceImpl.java | 10 ++++++++++ .../main/java/org/dspace/content/Community.java | 8 ++++++++ .../org/dspace/content/CommunityServiceImpl.java | 11 ++++++++++- .../java/org/dspace/content/dao/CollectionDAO.java | 3 +++ .../dspace/content/dao/impl/CollectionDAOImpl.java | 14 ++++++++++---- .../dspace/content/service/CollectionService.java | 2 ++ .../dspace/content/service/CommunityService.java | 3 +++ .../app/rest/converter/CollectionConverter.java | 8 ++++++++ .../app/rest/converter/CommunityConverter.java | 7 +++++++ .../org/dspace/app/rest/model/CollectionRest.java | 12 ++++++++++++ .../org/dspace/app/rest/model/CommunityRest.java | 10 ++++++++++ 12 files changed, 92 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/Collection.java b/dspace-api/src/main/java/org/dspace/content/Collection.java index ffec3b45cc87..33fbb51e3fa7 100644 --- a/dspace-api/src/main/java/org/dspace/content/Collection.java +++ b/dspace-api/src/main/java/org/dspace/content/Collection.java @@ -29,6 +29,7 @@ import javax.persistence.Transient; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.comparator.NameAscendingComparator; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; @@ -336,4 +337,12 @@ private CollectionService getCollectionService() { return collectionService; } + public int getAllItems() { + try { + return collectionService.countArchivedItem(this); + } catch (ItemCountException e) { + throw new RuntimeException(e); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index ddfd38694f23..961ad4cbcf0e 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -31,6 +31,8 @@ import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.browse.ItemCountException; +import org.dspace.browse.ItemCounter; import org.dspace.content.dao.CollectionDAO; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.CollectionService; @@ -1047,4 +1049,12 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu return (int) resp.getTotalSearchResults(); } + @Override + public int countArchivedItem(Collection collection) throws ItemCountException { + //TODO load ItemCounter on bean creation + Context context = new Context(); + return new ItemCounter(context).getCount(collection); + } + + } diff --git a/dspace-api/src/main/java/org/dspace/content/Community.java b/dspace-api/src/main/java/org/dspace/content/Community.java index fa99da33091a..5b8baba8e498 100644 --- a/dspace-api/src/main/java/org/dspace/content/Community.java +++ b/dspace-api/src/main/java/org/dspace/content/Community.java @@ 
-25,6 +25,7 @@ import javax.persistence.Transient; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.dspace.browse.ItemCountException; import org.dspace.content.comparator.NameAscendingComparator; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CommunityService; @@ -264,4 +265,11 @@ private CommunityService getCommunityService() { return communityService; } + public int getAllItems() { + try { + return communityService.getArchivedItems(this); + } catch (ItemCountException e) { + throw new RuntimeException(e); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java index 923745f761bb..7fe8dfd28951 100644 --- a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java @@ -24,6 +24,8 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; +import org.dspace.browse.ItemCountException; +import org.dspace.browse.ItemCounter; import org.dspace.content.dao.CommunityDAO; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.CollectionService; @@ -76,9 +78,9 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl imp protected IdentifierService identifierService; @Autowired(required = true) protected SubscribeService subscribeService; + protected CommunityServiceImpl() { super(); - } @Override @@ -708,4 +710,11 @@ public Community findByLegacyId(Context context, int id) throws SQLException { public int countTotal(Context context) throws SQLException { return communityDAO.countRows(context); } + + @Override + public int getArchivedItems(Community community) throws ItemCountException { + //TODO load ItemCounter on bean creation + Context context = new Context(); + return new ItemCounter(context).getCount(community); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/CollectionDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/CollectionDAO.java index 6bb65bbb46d8..c8014fb52097 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/CollectionDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/CollectionDAO.java @@ -10,6 +10,7 @@ import java.sql.SQLException; import java.util.List; import java.util.Map; +import java.util.UUID; import org.dspace.content.Collection; import org.dspace.content.Item; @@ -53,4 +54,6 @@ public List findAll(Context context, MetadataField order, Integer li int countRows(Context context) throws SQLException; List> getCollectionsWithBitstreamSizesTotal(Context context) throws SQLException; + + int countArchivedItems(Context context, UUID id) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java index c0ef6ea42fce..dc8bad7e19c4 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java @@ -7,11 +7,9 @@ */ package org.dspace.content.dao.impl; +import java.math.BigInteger; import java.sql.SQLException; -import java.util.AbstractMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import java.util.*; import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; 
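// (Descriptive note, not part of this patch.) java.math.BigInteger is imported because the native
// count(*) query added in countArchivedItems below returns its single result as a BigInteger,
// which is then narrowed with intValue().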
import javax.persistence.criteria.CriteriaQuery; @@ -182,4 +180,12 @@ public List> getCollectionsWithBitstreamSizesTotal(C } return returnList; } + + @Override + public int countArchivedItems(Context context, UUID collectionId) throws SQLException { + return ((BigInteger) getHibernateSession(context) + .createNativeQuery("select count(*) from collection2item ci left join item i on i.uuid = ci.item_id where i.in_archive and uuid(ci.collection_id)=uuid(:collectionId)") + .setParameter("collectionId", collectionId.toString()) + .getSingleResult()).intValue(); + } } \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index a5b2b7d8d891..d1ad6dcef2e0 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -15,6 +15,7 @@ import java.util.UUID; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -455,4 +456,5 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType) throws SQLException, SearchServiceException; + int countArchivedItem(Collection collection) throws ItemCountException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java index e7b62126650c..65454f26ea72 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java @@ -14,6 +14,7 @@ import java.util.UUID; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -292,4 +293,6 @@ public void removeSubcommunity(Context context, Community parentCommunity, Commu public List findAuthorizedGroupMapped(Context context, List actions) throws SQLException; int countTotal(Context context) throws SQLException; + + int getArchivedItems(Community community) throws ItemCountException; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java index fc5d99b05924..9a5000852df5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java @@ -8,7 +8,9 @@ package org.dspace.app.rest.converter; import org.dspace.app.rest.model.CollectionRest; +import org.dspace.app.rest.projection.Projection; import org.dspace.content.Collection; +import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.springframework.stereotype.Component; @@ -22,6 +24,12 @@ public class CollectionConverter extends DSpaceObjectConverter implements IndexableObjectConverter { + @Override + public CollectionRest convert(Collection collection, Projection projection) { + CollectionRest resource = super.convert(collection, projection); + resource.setArchivedItems(collection.getAllItems()); + 
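        // (Descriptive note, not part of this patch.) getAllItems() delegates to
        // CollectionServiceImpl.countArchivedItem, which opens a new Context and counts archived
        // items via ItemCounter; an ItemCountException surfaces here as a RuntimeException
        // rethrown by Collection.getAllItems().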
return resource; + } @Override protected CollectionRest newInstance() { return new CollectionRest(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CommunityConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CommunityConverter.java index d4c06470ce86..ef1f114ccbf9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CommunityConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CommunityConverter.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.converter; import org.dspace.app.rest.model.CommunityRest; +import org.dspace.app.rest.projection.Projection; import org.dspace.content.Community; import org.dspace.discovery.IndexableObject; import org.springframework.stereotype.Component; @@ -23,6 +24,12 @@ public class CommunityConverter extends DSpaceObjectConverter implements IndexableObjectConverter { + public CommunityRest convert(Community community, Projection projection) { + CommunityRest resource = super.convert(community, projection); + resource.setArchivedItems(community.getAllItems()); + return resource; + } + @Override protected CommunityRest newInstance() { return new CommunityRest(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CollectionRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CollectionRest.java index 1de4ec632cff..7405a1525dc1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CollectionRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CollectionRest.java @@ -74,4 +74,16 @@ public String getCategory() { public String getType() { return NAME; } + + private int archivedItems; + + public int getArchivedItems() { + return archivedItems; + } + + public void setArchivedItems(int archivedItems) { + this.archivedItems = archivedItems; + } + + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CommunityRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CommunityRest.java index f8ccbad10e62..8ffdcd2e8ae9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CommunityRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CommunityRest.java @@ -58,4 +58,14 @@ public String getCategory() { public String getType() { return NAME; } + + private int archivedItems; + + public int getArchivedItems() { + return archivedItems; + } + + public void setArchivedItems(int archivedItems) { + this.archivedItems = archivedItems; + } } From 78fba6b579c3af233c80861c6efb90491cb8d925 Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Thu, 11 May 2023 21:24:31 +0200 Subject: [PATCH 188/686] 94299: checkstyle --- .../org/dspace/app/rest/BitstreamCategoryRestController.java | 1 - .../repository/patch/operation/BitstreamRemoveOperation.java | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java index 6d970eb10961..aa511bcb9282 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java @@ -20,7 +20,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.hateoas.RepresentationModel; import 
org.springframework.http.ResponseEntity; -import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java index 77336002716f..b0e2a45c9d23 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java @@ -68,7 +68,8 @@ public boolean supports(Object objectToMatch, Operation operation) { operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE); } - public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation) throws SQLException { + public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation) + throws SQLException { try { authorizeService.authorizeAction(context, bitstream, operation); } catch (AuthorizeException e) { From 50e816f718b96b2cf2c3ebd40f4bcaea55cecdfc Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Thu, 11 May 2023 23:39:34 +0200 Subject: [PATCH 189/686] CST-9634 fix issues with date timezone and inheritance of default policies --- .../bulkaccesscontrol/BulkAccessControl.java | 81 ++++++------------- .../model/AccessCondition.java | 9 ++- .../util/MultiFormatDateDeserializer.java | 41 ++++++++++ .../test/data/dspaceFolder/config/local.cfg | 2 +- .../BulkAccessControlIT.java | 54 ++++--------- 5 files changed, 89 insertions(+), 98 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index af9d4e6c1f0e..ce0e63039c0f 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -14,11 +14,15 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Arrays; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.TimeZone; import java.util.UUID; import java.util.function.Function; import java.util.stream.Collectors; @@ -81,7 +85,7 @@ public class BulkAccessControl extends DSpaceRunnable uuids; private Context context; @@ -124,12 +128,13 @@ public void setup() throws ParseException { .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity())); filename = commandLine.getOptionValue('f'); - uuids = commandLine.getOptionValues('u'); + uuids = commandLine.hasOption('u') ? 
Arrays.asList(commandLine.getOptionValues('u')) : null; } @Override public void internalRun() throws Exception { ObjectMapper mapper = new ObjectMapper(); + mapper.setTimeZone(TimeZone.getTimeZone("UTC")); AccessControl accessControl; context = new Context(Context.Mode.BATCH_EDIT); assignCurrentUserInContext(); @@ -137,12 +142,9 @@ public void internalRun() throws Exception { context.turnOffAuthorisationSystem(); - if (uuids == null) { - handler.logError("A target uuid must be provided (run with -h flag for details)"); - throw new IllegalArgumentException("A target uuid must be provided"); - } else if (uuids.length <= 0) { - handler.logError("A target uuid must be provided with at least on uuid"); - throw new IllegalArgumentException("A target uuid must be provided with at least on uuid"); + if (uuids == null || uuids.size() == 0) { + handler.logError("A target uuid must be provided with at least on uuid (run with -h flag for details)"); + throw new IllegalArgumentException("At least one target uuid must be provided"); } InputStream inputStream = handler.getFileStream(context, filename) @@ -152,10 +154,9 @@ public void internalRun() throws Exception { try { accessControl = mapper.readValue(inputStream, AccessControl.class); } catch (IOException e) { - handler.logError("Error parsing json file"); + handler.logError("Error parsing json file " + e.getMessage()); throw new IllegalArgumentException("Error parsing json file", e); } - try { validate(accessControl); updateItemsAndBitstreamsPolices(accessControl); @@ -231,12 +232,12 @@ private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQ } private void validateConstraint(AccessConditionBitstream bitstream) throws SQLException { - if (uuids.length > 1 && containsConstraints(bitstream)) { + if (uuids.size() > 1 && containsConstraints(bitstream)) { handler.logError("constraint isn't supported when multiple uuids are provided"); throw new BulkAccessControlException("constraint isn't supported when multiple uuids are provided"); } else { DSpaceObject dso = - dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids[0])); + dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(0))); if (Objects.nonNull(dso) && dso.getType() != Constants.ITEM) { handler.logError("constraint is not supported when uuid isn't an Item"); @@ -296,11 +297,11 @@ public void updateItemsAndBitstreamsPolices(AccessControl accessControl) } } - private String buildSolrQuery(String[] uuids) throws SQLException { - String [] query = new String[uuids.length]; + private String buildSolrQuery(List uuids) throws SQLException { + String [] query = new String[uuids.size()]; for (int i = 0 ; i < query.length ; i++) { - DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids[i])); + DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(i))); if (dso.getType() == Constants.COMMUNITY) { query[i] = "location.comm:" + dso.getID(); @@ -348,35 +349,23 @@ private void updateItemPolicies(Item item, AccessControl accessControl) throws S } private void setItemPolicies(Item item, AccessControl accessControl) throws SQLException, AuthorizeException { - AccessConditionItem itemControl = accessControl.getItem(); - - if (isAdjustPoliciesNeeded(item, itemControl.getMode(), itemControl.getAccessConditions())) { - itemService.adjustItemPolicies(context, item, item.getOwningCollection()); - } - accessControl .getItem() .getAccessConditions() .forEach(accessCondition -> createResourcePolicy(item, 
accessCondition, itemAccessConditions.get(accessCondition.getName()))); - } - private boolean isAdjustPoliciesNeeded(Item item, String mode, List accessConditions) { - return (isAppendModeDisabled() && item.isArchived()) || - (REPLACE_MODE.equals(mode) && CollectionUtils.isEmpty(accessConditions)); + itemService.adjustItemPolicies(context, item, item.getOwningCollection()); } private void updateBitstreamsPolicies(Item item, AccessControl accessControl) { - - if (containsConstraints(accessControl.getBitstream())) { - findMatchedBitstreams(item, accessControl.getBitstream().getConstraint().getUuids()) + item.getBundles(Constants.CONTENT_BUNDLE_NAME).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .filter(bitstream -> uuids == null || + uuids.size() == 0 || + uuids.contains(bitstream.getID().toString())) .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); - } else { - findAllBitstreams(item) - .forEach(bitstream -> - updateBitstreamPolicies(bitstream, item, accessControl)); - } } private boolean containsConstraints(AccessConditionBitstream bitstream) { @@ -385,20 +374,6 @@ private boolean containsConstraints(AccessConditionBitstream bitstream) { isNotEmpty(bitstream.getConstraint().getUuids()); } - private List findMatchedBitstreams(Item item, List uuids) { - return item.getBundles().stream() - .flatMap(bundle -> bundle.getBitstreams().stream()) - .filter(bitstream -> uuids.contains(bitstream.getID().toString())) - .collect(Collectors.toList()); - } - - private List findAllBitstreams(Item item) { - return item.getBundles() - .stream() - .flatMap(bundle -> bundle.getBitstreams().stream()) - .collect(Collectors.toList()); - } - private void updateBitstreamPolicies(Bitstream bitstream, Item item, AccessControl accessControl) { if (REPLACE_MODE.equals(accessControl.getBitstream().getMode())) { @@ -424,17 +399,12 @@ private void removeReadPolicies(DSpaceObject dso, String type) { private void setBitstreamPolicies(Bitstream bitstream, Item item, AccessControl accessControl) throws SQLException, AuthorizeException { - AccessConditionBitstream bitstreamControl = accessControl.getBitstream(); - - if (isAdjustPoliciesNeeded(item, bitstreamControl.getMode(), bitstreamControl.getAccessConditions())) { - itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream); - } - accessControl.getBitstream() .getAccessConditions() .forEach(accessCondition -> createResourcePolicy(bitstream, accessCondition, uploadAccessConditions.get(accessCondition.getName()))); + itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream); } private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition, @@ -466,11 +436,6 @@ private void assignSpecialGroupsInContext() throws SQLException { } } - private boolean isAppendModeDisabled() { - return !configurationService.getBooleanProperty( - "core.authorization.installitem.inheritance-read.append-mode"); - } - @Override @SuppressWarnings("unchecked") public BulkAccessControlScriptConfiguration getScriptConfiguration() { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java index 40ea2d0e25f4..6cf95e0e2179 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java @@ -9,9 +9,12 @@ import java.util.Date; 
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; +import org.dspace.util.MultiFormatDateDeserializer; + /** - * Class that model the value of ACCESS_CONDITION_CELL - * of sheet BITSTREAM_METADATA of the Bulk import excel. + * Class that model the values of an Access Condition as expressed in the {@link BulkAccessControl} input file * * @author Mohamed Eskander (mohamed.eskander at 4science.it) */ @@ -21,8 +24,10 @@ public class AccessCondition { private String description; + @JsonDeserialize(using = MultiFormatDateDeserializer.class) private Date startDate; + @JsonDeserialize(using = MultiFormatDateDeserializer.class) private Date endDate; public AccessCondition() { diff --git a/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java b/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java new file mode 100644 index 000000000000..2b6f37beb2e1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.io.IOException; +import java.util.Date; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; + +/** + * This is a custom date deserializer for jackson that make use of our + * {@link MultiFormatDateParser} + * + * Dates are parsed as being in the UTC zone. 
+ * + */ +public class MultiFormatDateDeserializer extends StdDeserializer { + + public MultiFormatDateDeserializer() { + this(null); + } + + public MultiFormatDateDeserializer(Class vc) { + super(vc); + } + + @Override + public Date deserialize(JsonParser jsonparser, DeserializationContext context) + throws IOException, JsonProcessingException { + String date = jsonparser.getText(); + return MultiFormatDateParser.parse(date); + } +} \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index c009acb30e1d..5111b963a0e1 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -43,7 +43,7 @@ dspace.server.url = http://localhost db.driver = org.h2.Driver db.dialect=org.hibernate.dialect.H2Dialect # Use a 10 second database lock timeout to avoid occasional JDBC lock timeout errors -db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000;NON_KEYWORDS=VALUE +db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000;NON_KEYWORDS=VALUE;TIME ZONE=UTC db.username = sa db.password = # H2's default schema is PUBLIC diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java index b63d31f52464..26963c845e1d 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -276,7 +276,7 @@ public void performBulkAccessWithInvalidAccessConditionDateTest() throws Excepti " \"accessConditions\": [\n" + " {\n" + " \"name\": \"embargo\",\n" + - " \"endDate\": \"2024-06-24T23:59:59.999+0000\"\n" + + " \"endDate\": \"2024-06-24T00:00:00Z\"\n" + " }\n" + " ]\n" + " }}\n"; @@ -296,7 +296,7 @@ public void performBulkAccessWithInvalidAccessConditionDateTest() throws Excepti " \"accessConditions\": [\n" + " {\n" + " \"name\": \"lease\",\n" + - " \"startDate\": \"2024-06-24T23:59:59.999+0000\"\n" + + " \"startDate\": \"2024-06-24T00:00:00Z\"\n" + " }\n" + " ]\n" + " }}\n"; @@ -329,7 +329,7 @@ public void performBulkAccessWithValidJsonTest() throws Exception { .build(); Community subCommunityThree = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("sub community two") + .withName("sub community three") .build(); Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) @@ -344,9 +344,9 @@ public void performBulkAccessWithValidJsonTest() throws Exception { .withName("collection three") .build(); - ItemBuilder.createItem(context, collectionOne).build(); + Item itemOne = ItemBuilder.createItem(context, collectionOne).build(); - ItemBuilder.createItem(context, collectionTwo).build(); + Item itemTwo = ItemBuilder.createItem(context, collectionTwo).build(); Item itemThree = ItemBuilder.createItem(context, collectionThree).withTitle("item three title").build(); @@ -359,7 +359,7 @@ public void performBulkAccessWithValidJsonTest() throws Exception { " \"accessConditions\": [\n" + " {\n" + " \"name\": \"embargo\",\n" + - " \"startDate\": \"2024-06-24T00:00:00.000Z\"\n" + + " \"startDate\": \"2024-06-24\"\n" + " }\n" + " ]\n" + " }}\n"; @@ -380,28 +380,26 @@ public void performBulkAccessWithValidJsonTest() throws Exception { assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); - Iterator itemIteratorOne = 
itemService.findByCollection(context, collectionOne); - Iterator itemIteratorTwo = itemService.findByCollection(context, collectionTwo); + itemOne = context.reloadEntity(itemOne); + itemTwo = context.reloadEntity(itemTwo); itemThree = context.reloadEntity(itemThree); itemFour = context.reloadEntity(itemFour); Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + assertThat(itemOne.getResourcePolicies(), hasSize(1)); + assertThat(itemOne.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); -// matchItemsResourcePolicies(itemIteratorOne, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null); -// matchItemsResourcePolicies(itemIteratorTwo, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null); -// matchItemResourcePolicies(itemThree, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null); - - assertThat(itemThree.getResourcePolicies(), hasSize(2)); - assertThat(itemThree.getResourcePolicies(), containsInAnyOrder( - matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), - matches(READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24T00:00:00.000Z", null, null) + assertThat(itemTwo.getResourcePolicies(), hasSize(1)); + assertThat(itemTwo.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) )); - // just a note here is working fine + assertThat(itemThree.getResourcePolicies(), hasSize(1)); assertThat(itemThree.getResourcePolicies(), hasItem( - matches(READ, anonymousGroup, "embargo", TYPE_CUSTOM, - itemThree.getResourcePolicies().get(0).getStartDate(), null, null) + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) )); assertThat(itemFour.getResourcePolicies().size(), is(1)); @@ -409,24 +407,6 @@ public void performBulkAccessWithValidJsonTest() throws Exception { matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) )); - - - - } - - private void matchItemsResourcePolicies( - Iterator itemIterator, Group group, String rpName, String rpType, String startDate, String endDate) { - while (itemIterator.hasNext()) { - Item item = itemIterator.next(); - matchItemResourcePolicies(item, group, rpName, rpType, startDate, endDate); - } - } - - private void matchItemResourcePolicies( - Item item, Group group, String rpName, String rpType, String startDate, String endDate) { - - assertThat(item.getResourcePolicies(), hasItem( - matches(READ, group, rpName, rpType, startDate, endDate, null))); } private void buildJsonFile(String json) throws IOException { From 2cf5b290b624efca51c3d2f35f1257bc828bfaba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ad=C3=A1n=20Rom=C3=A1n=20Ruiz?= <116575333+aroman-arvo@users.noreply.github.com> Date: Thu, 11 May 2023 23:59:38 +0200 Subject: [PATCH 190/686] Fix "Simultaneous deletion of multiple bitstreams from the same bundle often compromises the state of the bundle" (#8778) * DS-8694 * clean imports * cleaning code * DS-8694 - re-deletion of bitstream throw exception * ds-8694 - clean code --- .../dspace/app/rest/repository/DSpaceRestRepository.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java index 01f127eca5ac..a93f5e55dc02 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java @@ -195,7 +195,11 @@ public long count() { /** * Delete the object identified by its ID */ - public void deleteById(ID id) { + /** + * Method should be synchronized to avoid hibernate partial deletion bug when deleting multiple bitstreams: + * https://github.com/DSpace/DSpace/issues/8694 + */ + public synchronized void deleteById(ID id) { Context context = obtainContext(); try { getThisRepository().delete(context, id); From 03c39cf6f1439e5393f9df5028fc64b3ba1cc837 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Mon, 17 Apr 2023 14:40:43 +1200 Subject: [PATCH 191/686] Bitstreams should inherit DEFAULT_BITSTREAM_READ, then DEFAULT_ITEM_READ When generating automatic policies, bitstream DSOs have always inherited from DEFAULT_ITEM_READ, even if the collection's DEFAULT_BITSTREAM_READ policy differs. This bugfix checks DEFAULT_BITSTREAM_READ first, and uses DEFAULT_ITEM_READ as a fallback. --- .../org/dspace/authorize/AuthorizeServiceImpl.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 34543c078ae2..e8e639a0b20f 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -674,8 +674,17 @@ public void generateAutomaticPolicies(Context context, Date embargoDate, throws SQLException, AuthorizeException { if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) { - - List authorizedGroups = getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); + // Get DEFAULT_BITSTREAM_READ policy from the collection + List defaultBitstreamReadGroups = + getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_BITSTREAM_READ); + // Get DEFAULT_ITEM_READ policy from the collection + List defaultItemReadGroups = + getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); + // By default, use DEFAULT_BITSTREAM_READ. Otherwise, use DEFAULT_ITEM_READ + List authorizedGroups = defaultBitstreamReadGroups; + if (defaultBitstreamReadGroups.isEmpty()) { + authorizedGroups = defaultItemReadGroups; + } removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM); From 019d0319dc8174611f5a37b9b1c4960baf2612f2 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 14:01:14 +1200 Subject: [PATCH 192/686] #8783 New bitstreams inherit DEFAULT_BITSTREAM_READ from owning collection Otherwise will keep inherited READ from bundle. Does not affect embargos set during submission or workflow, these will always be applied afterwards. 
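In outline, the intended rule is roughly the sketch below. This is a simplified, hand-written illustration of the behaviour described above, not the code in this patch: the AuthorizeService calls and the READ / DEFAULT_BITSTREAM_READ constants are the existing DSpace ones already used elsewhere in this series, while the DefaultBitstreamReadSketch class and the applyDefaultBitstreamRead helper name are invented purely for this example.

    import java.sql.SQLException;
    import java.util.List;

    import org.dspace.authorize.AuthorizeException;
    import org.dspace.authorize.ResourcePolicy;
    import org.dspace.authorize.service.AuthorizeService;
    import org.dspace.content.Bitstream;
    import org.dspace.content.Collection;
    import org.dspace.core.Constants;
    import org.dspace.core.Context;
    import org.dspace.eperson.Group;

    public class DefaultBitstreamReadSketch {

        /**
         * Illustrative only: prefer the owning collection's DEFAULT_BITSTREAM_READ
         * groups for a newly added bitstream; if the collection defines none, the
         * READ policies already inherited from the bundle are left untouched.
         */
        public static void applyDefaultBitstreamRead(Context context, AuthorizeService authorizeService,
                Collection owningCollection, Bitstream bitstream)
                throws SQLException, AuthorizeException {
            // Groups holding DEFAULT_BITSTREAM_READ on the owning collection
            List<Group> defaultBitstreamReadGroups =
                    authorizeService.getAuthorizedGroups(context, owningCollection,
                            Constants.DEFAULT_BITSTREAM_READ);
            if (defaultBitstreamReadGroups.isEmpty()) {
                // No collection-level default: keep the READ policies inherited from the bundle
                return;
            }
            // Replace the inherited READ policies with the collection defaults
            authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ);
            for (Group group : defaultBitstreamReadGroups) {
                authorizeService.addPolicy(context, bitstream, Constants.READ, group,
                        ResourcePolicy.TYPE_INHERITED);
            }
        }
    }

The actual change below additionally guards against a missing owning item or owning collection before applying this rule, so bitstreams created outside of a normal item context are unaffected.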
--- .../org/dspace/content/BundleServiceImpl.java | 23 +++++++ .../app/rest/BitstreamRestControllerIT.java | 60 ++++++++++++++++++- 2 files changed, 81 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 485f1d645130..01bee702bb58 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -34,6 +34,7 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; +import org.dspace.eperson.Group; import org.dspace.event.Event; import org.springframework.beans.factory.annotation.Autowired; @@ -173,6 +174,28 @@ public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... authorizeService.inheritPolicies(context, bundle, bitstream); + if (owningItem != null) { + // Resolve owning collection + Collection owningCollection = owningItem.getOwningCollection(); + if (owningCollection != null) { + // Get DEFAULT_BITSTREAM_READ policy from the collection + List defaultBitstreamReadGroups = + authorizeService.getAuthorizedGroups(context, owningCollection, + Constants.DEFAULT_BITSTREAM_READ); + log.info(defaultBitstreamReadGroups.size()); + // If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy + // inherited from the bundle with this policy. + if (!defaultBitstreamReadGroups.isEmpty()) { + // Remove read policies from the bitstream + authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ); + for (Group defaultBitstreamReadGroup : defaultBitstreamReadGroups) { + // Inherit this policy as READ, directly from the collection roles + authorizeService.addPolicy(context, bitstream, + Constants.READ, defaultBitstreamReadGroup, ResourcePolicy.TYPE_INHERITED); + } + } + } + } bitstreamService.update(context, bitstream); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index 09dbdca505fe..79b7bd22c1ab 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -17,8 +17,7 @@ import static org.dspace.builder.ResourcePolicyBuilder.createResourcePolicy; import static org.dspace.content.BitstreamFormat.KNOWN; import static org.dspace.content.BitstreamFormat.SUPPORTED; -import static org.dspace.core.Constants.READ; -import static org.dspace.core.Constants.WRITE; +import static org.dspace.core.Constants.*; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; @@ -56,6 +55,8 @@ import org.apache.pdfbox.text.PDFTextStripper; import org.apache.solr.client.solrj.SolrServerException; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.CollectionBuilder; @@ -70,6 +71,7 @@ import org.dspace.content.Item; import org.dspace.content.service.BitstreamFormatService; import 
org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; import org.dspace.core.Constants; import org.dspace.disseminate.CitationDocumentServiceImpl; import org.dspace.eperson.EPerson; @@ -112,6 +114,12 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest @Autowired private BitstreamFormatService bitstreamFormatService; + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private CollectionService collectionService; + private Bitstream bitstream; private BitstreamFormat supportedFormat; private BitstreamFormat knownFormat; @@ -626,6 +634,54 @@ public void testPrivateBitstream() throws Exception { } + @Test + public void testBitstreamDefaultReadInheritanceFromCollection() throws Exception { + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community and one collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + // Explicitly create a restrictive default bitstream read policy on the collection + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + authorizeService.removePoliciesActionFilter(context, col1, DEFAULT_BITSTREAM_READ); + authorizeService.addPolicy(context, col1, DEFAULT_BITSTREAM_READ, internalGroup); + + //2. A public item with a new bitstream that is not explicitly restricted + // but should instead inherit + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .build(); + // make sure this item has no default policies for a new bundle to inherit + authorizeService.removePoliciesActionFilter(context, publicItem1, DEFAULT_BITSTREAM_READ); + + String bitstreamContent = "Private!"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Test Restricted Bitstream") + .withDescription("This bitstream is restricted") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + //** WHEN ** + //We download the bitstream + getClient().perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + //** THEN ** + .andExpect(status().isUnauthorized()); + + //An unauthorized request should not log statistics + checkNumberOfStatsRecords(bitstream, 0); + } + @Test public void restrictedGroupBitstreamForbiddenTest() throws Exception { context.turnOffAuthorisationSystem(); From 4a88573dc1462080e92735b21f3729e7281c766b Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 14:18:08 +1200 Subject: [PATCH 193/686] #8783 Strip out unused generateAutomaticPolicies method from AuthorizeService --- .../authorize/AuthorizeServiceImpl.java | 63 ------------------- .../authorize/service/AuthorizeService.java | 20 +----- 2 files changed, 1 insertion(+), 82 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index e8e639a0b20f..8058caf6696c 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -654,69 +654,6 @@ 
public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group g } } - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. - * - * @param context The relevant DSpace Context. - * @param embargoDate embargo end date - * @param reason embargo reason - * @param dso DSpace object - * @param owningCollection collection to get group policies from - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - @Override - public void generateAutomaticPolicies(Context context, Date embargoDate, - String reason, DSpaceObject dso, Collection owningCollection) - throws SQLException, AuthorizeException { - - if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) { - // Get DEFAULT_BITSTREAM_READ policy from the collection - List defaultBitstreamReadGroups = - getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_BITSTREAM_READ); - // Get DEFAULT_ITEM_READ policy from the collection - List defaultItemReadGroups = - getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); - // By default, use DEFAULT_BITSTREAM_READ. Otherwise, use DEFAULT_ITEM_READ - List authorizedGroups = defaultBitstreamReadGroups; - if (defaultBitstreamReadGroups.isEmpty()) { - authorizedGroups = defaultItemReadGroups; - } - - removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM); - - // look for anonymous - boolean isAnonymousInPlace = false; - for (Group g : authorizedGroups) { - if (StringUtils.equals(g.getName(), Group.ANONYMOUS)) { - isAnonymousInPlace = true; - } - } - if (!isAnonymousInPlace) { - // add policies for all the groups - for (Group g : authorizedGroups) { - ResourcePolicy rp = createOrModifyPolicy(null, context, null, g, null, embargoDate, Constants.READ, - reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - - } else { - // add policy just for anonymous - ResourcePolicy rp = createOrModifyPolicy(null, context, null, - groupService.findByName(context, Group.ANONYMOUS), null, - embargoDate, Constants.READ, reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - } - } - @Override public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException { diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 36679f94c6a4..14a7ff5c82bb 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -470,24 +470,6 @@ public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject o, Grou public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action) throws SQLException; - - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. 
- * - * @param context current context - * @param embargoDate date - * @param reason reason - * @param dso DSpaceObject - * @param owningCollection collection - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - public void generateAutomaticPolicies(Context context, Date embargoDate, String reason, DSpaceObject dso, - Collection owningCollection) throws SQLException, AuthorizeException; - public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException; @@ -603,7 +585,7 @@ long countAdminAuthorizedCollection(Context context, String query) /** * Replace all the policies in the target object with exactly the same policies that exist in the source object - * + * * @param context DSpace Context * @param source source of policies * @param dest destination of inherited policies From c2575b05b2f957adc7e2096f625c7d50264bf657 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 14:19:53 +1200 Subject: [PATCH 194/686] #8783 checkstyle fixes for integration test --- .../java/org/dspace/app/rest/BitstreamRestControllerIT.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index 79b7bd22c1ab..7a4aa0c0077a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -17,7 +17,9 @@ import static org.dspace.builder.ResourcePolicyBuilder.createResourcePolicy; import static org.dspace.content.BitstreamFormat.KNOWN; import static org.dspace.content.BitstreamFormat.SUPPORTED; -import static org.dspace.core.Constants.*; +import static org.dspace.core.Constants.DEFAULT_BITSTREAM_READ; +import static org.dspace.core.Constants.READ; +import static org.dspace.core.Constants.WRITE; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; @@ -55,7 +57,6 @@ import org.apache.pdfbox.text.PDFTextStripper; import org.apache.solr.client.solrj.SolrServerException; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; -import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.BitstreamBuilder; From b69517da7444d23df76a54873a55b4936d15e2e3 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 17:47:56 +1200 Subject: [PATCH 195/686] #8783 don't inherit DEFAULT_BITSTREAM_READ if item has current embargo --- .../org/dspace/content/BundleServiceImpl.java | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 01bee702bb58..3fb78ee6c8ac 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -7,13 +7,12 @@ */ package org.dspace.content; -import static org.dspace.core.Constants.ADD; -import static org.dspace.core.Constants.REMOVE; -import static org.dspace.core.Constants.WRITE; +import static org.dspace.core.Constants.*; import 
java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Date; import java.util.Iterator; import java.util.LinkedList; import java.util.List; @@ -174,7 +173,18 @@ public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... authorizeService.inheritPolicies(context, bundle, bitstream); - if (owningItem != null) { + // The next logic is a bit overly cautious but ensures that if there are any future start dates + // on the item or bitstream read policies, that we'll skip inheriting anything from the owning collection + // just in case. In practice, the item install process would overwrite these anyway but it may satisfy + // some other bitstream creation methods and integration tests + boolean isEmbargoed = false; + for (ResourcePolicy resourcePolicy : authorizeService.getPoliciesActionFilter(context, owningItem, READ)) { + if (!resourcePolicyService.isDateValid(resourcePolicy)) { + isEmbargoed = true; + break; + } + } + if (owningItem != null && !isEmbargoed) { // Resolve owning collection Collection owningCollection = owningItem.getOwningCollection(); if (owningCollection != null) { From 694339993840a4378207c7846bf78f69fe7d318a Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 18:03:02 +1200 Subject: [PATCH 196/686] #8783 checkstyle fixes --- .../org/dspace/content/BundleServiceImpl.java | 66 ++++++++++--------- 1 file changed, 34 insertions(+), 32 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 3fb78ee6c8ac..20c43e4bfc73 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -7,12 +7,14 @@ */ package org.dspace.content; -import static org.dspace.core.Constants.*; +import static org.dspace.core.Constants.ADD; +import static org.dspace.core.Constants.READ; +import static org.dspace.core.Constants.REMOVE; +import static org.dspace.core.Constants.WRITE; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; -import java.util.Date; import java.util.Iterator; import java.util.LinkedList; import java.util.List; @@ -74,14 +76,14 @@ public Bundle find(Context context, UUID id) throws SQLException { if (bundle == null) { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "not_found,bundle_id=" + id)); + "not_found,bundle_id=" + id)); } return null; } else { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "bundle_id=" + id)); + "bundle_id=" + id)); } return bundle; @@ -106,7 +108,7 @@ public Bundle create(Context context, Item item, String name) throws SQLExceptio log.info(LogHelper.getHeader(context, "create_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); // if we ever use the identifier service for bundles, we should // create the bundle before we create the Event and should add all @@ -132,12 +134,12 @@ public Bitstream getBitstreamByName(Bundle bundle, String name) { @Override public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.ADD); log.info(LogHelper.getHeader(context, "add_bitstream", 
"bundle_id=" - + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + + bundle.getID() + ",bitstream_id=" + bitstream.getID())); // First check that the bitstream isn't already in the list List bitstreams = bundle.getBitstreams(); @@ -167,8 +169,8 @@ public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) context.addEvent(new Event(Event.ADD, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... @@ -211,17 +213,17 @@ public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) @Override public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws AuthorizeException, SQLException, IOException { + throws AuthorizeException, SQLException, IOException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.REMOVE); log.info(LogHelper.getHeader(context, "remove_bitstream", - "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); context.addEvent(new Event(Event.REMOVE, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); //Ensure that the last modified from the item is triggered ! Item owningItem = (Item) getParentObject(context, bundle); @@ -254,9 +256,9 @@ public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) @Override public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Collection collection) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List policies = authorizeService.getPoliciesActionFilter(context, collection, - Constants.DEFAULT_BITSTREAM_READ); + Constants.DEFAULT_BITSTREAM_READ); // change the action to just READ // just don't call update on the resourcepolicies!!! 
@@ -264,7 +266,7 @@ public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Col if (!i.hasNext()) { throw new java.sql.SQLException("Collection " + collection.getID() - + " has no default bitstream READ policies"); + + " has no default bitstream READ policies"); } List newPolicies = new ArrayList(); @@ -279,7 +281,7 @@ public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Col @Override public void replaceAllBitstreamPolicies(Context context, Bundle bundle, List newpolicies) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List bitstreams = bundle.getBitstreams(); if (CollectionUtils.isNotEmpty(bitstreams)) { for (Bitstream bs : bitstreams) { @@ -401,16 +403,16 @@ public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws if (bitstream == null) { //This should never occur but just in case log.warn(LogHelper.getHeader(context, "Invalid bitstream id while changing bitstream order", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); continue; } // If we have a Bitstream not in the current list, log a warning & exit immediately if (!currentBitstreams.contains(bitstream)) { log.warn(LogHelper.getHeader(context, - "Encountered a bitstream not in this bundle while changing bitstream " + - "order. Bitstream order will not be changed.", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Encountered a bitstream not in this bundle while changing bitstream " + + "order. Bitstream order will not be changed.", + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); return; } updatedBitstreams.add(bitstream); @@ -419,9 +421,9 @@ public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws // If our lists are different sizes, exit immediately if (updatedBitstreams.size() != currentBitstreams.size()) { log.warn(LogHelper.getHeader(context, - "Size of old list and new list do not match. Bitstream order will not be " + - "changed.", - "Bundle: " + bundle.getID())); + "Size of old list and new list do not match. 
Bitstream order will not be " + + "changed.", + "Bundle: " + bundle.getID())); return; } @@ -467,7 +469,7 @@ public DSpaceObject getAdminObject(Context context, Bundle bundle, int action) t } else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamDeletion()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamDeletion()) { + .canCommunityAdminPerformBitstreamDeletion()) { adminObject = community; } break; @@ -475,10 +477,10 @@ public DSpaceObject getAdminObject(Context context, Bundle bundle, int action) t if (AuthorizeConfiguration.canItemAdminPerformBitstreamCreation()) { adminObject = item; } else if (AuthorizeConfiguration - .canCollectionAdminPerformBitstreamCreation()) { + .canCollectionAdminPerformBitstreamCreation()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamCreation()) { + .canCommunityAdminPerformBitstreamCreation()) { adminObject = community; } break; @@ -510,7 +512,7 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz // Check authorisation //AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE); log.info(LogHelper.getHeader(context, "update_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); super.update(context, bundle); bundleDAO.save(context, bundle); @@ -518,10 +520,10 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz if (bundle.isModified() || bundle.isMetadataModified()) { if (bundle.isMetadataModified()) { context.addEvent(new Event(Event.MODIFY_METADATA, bundle.getType(), bundle.getID(), bundle.getDetails(), - getIdentifiers(context, bundle))); + getIdentifiers(context, bundle))); } context.addEvent(new Event(Event.MODIFY, Constants.BUNDLE, bundle.getID(), - null, getIdentifiers(context, bundle))); + null, getIdentifiers(context, bundle))); bundle.clearModified(); bundle.clearDetails(); } @@ -530,12 +532,12 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz @Override public void delete(Context context, Bundle bundle) throws SQLException, AuthorizeException, IOException { log.info(LogHelper.getHeader(context, "delete_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); authorizeService.authorizeAction(context, bundle, Constants.DELETE); context.addEvent(new Event(Event.DELETE, Constants.BUNDLE, bundle.getID(), - bundle.getName(), getIdentifiers(context, bundle))); + bundle.getName(), getIdentifiers(context, bundle))); // Remove bitstreams List bitstreams = bundle.getBitstreams(); From 44c066d07aec07e7ec3796a60d6214ec29c57ec3 Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 12 May 2023 09:12:05 +0300 Subject: [PATCH 197/686] refactoring and adding new test cases --- .../bulkaccesscontrol/BulkAccessControl.java | 14 +- .../model/AccessConditionBitstream.java | 20 +- .../BulkAccessControlIT.java | 476 +++++++++++++++++- 3 files changed, 466 insertions(+), 44 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index af9d4e6c1f0e..88accf2dba43 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -152,7 +152,7 @@ public void internalRun() throws Exception { try { accessControl = mapper.readValue(inputStream, AccessControl.class); } catch (IOException e) { - 
handler.logError("Error parsing json file"); + handler.logError("Error parsing json file " + e.getMessage()); throw new IllegalArgumentException("Error parsing json file", e); } @@ -234,7 +234,7 @@ private void validateConstraint(AccessConditionBitstream bitstream) throws SQLEx if (uuids.length > 1 && containsConstraints(bitstream)) { handler.logError("constraint isn't supported when multiple uuids are provided"); throw new BulkAccessControlException("constraint isn't supported when multiple uuids are provided"); - } else { + } else if (uuids.length == 1 && containsConstraints(bitstream)) { DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids[0])); @@ -256,7 +256,7 @@ private void validateAccessCondition(AccessCondition accessCondition) { itemAccessConditions.get(accessCondition.getName()).validateResourcePolicy( context, accessCondition.getName(), accessCondition.getStartDate(), accessCondition.getEndDate()); } catch (Exception e) { - handler.logError("invalid access condition" + e.getMessage()); + handler.logError("invalid access condition, " + e.getMessage()); handler.handleException(e); } } @@ -274,7 +274,7 @@ public void updateItemsAndBitstreamsPolices(AccessControl accessControl) while (itemIterator.hasNext()) { - Item item = itemIterator.next(); + Item item = context.reloadEntity(itemIterator.next()); if (Objects.nonNull(accessControl.getItem())) { updateItemPolicies(item, accessControl); @@ -370,7 +370,7 @@ private boolean isAdjustPoliciesNeeded(Item item, String mode, List updateBitstreamPolicies(bitstream, item, accessControl)); } else { findAllBitstreams(item) @@ -381,8 +381,8 @@ private void updateBitstreamsPolicies(Item item, AccessControl accessControl) { private boolean containsConstraints(AccessConditionBitstream bitstream) { return Objects.nonNull(bitstream) && - Objects.nonNull(bitstream.getConstraint()) && - isNotEmpty(bitstream.getConstraint().getUuids()); + Objects.nonNull(bitstream.getConstraints()) && + isNotEmpty(bitstream.getConstraints().getUuid()); } private List findMatchedBitstreams(Item item, List uuids) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java index f9f5cbce8de8..46354ddff41c 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java @@ -17,7 +17,7 @@ public class AccessConditionBitstream { private String mode; - private Constraint constraint; + private Constraint constraints; private List accessConditions; @@ -29,12 +29,12 @@ public void setMode(String mode) { this.mode = mode; } - public Constraint getConstraint() { - return constraint; + public Constraint getConstraints() { + return constraints; } - public void setConstraint(Constraint constraint) { - this.constraint = constraint; + public void setConstraints(Constraint constraints) { + this.constraints = constraints; } public List getAccessConditions() { @@ -50,14 +50,14 @@ public void setAccessConditions(List accessConditions) { public class Constraint { - private List uuids; + private List uuid; - public List getUuids() { - return uuids; + public List getUuid() { + return uuid; } - public void setUuids(List uuids) { - this.uuids = uuids; + public void setUuid(List uuid) { + this.uuid = uuid; } } diff --git 
a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java index b63d31f52464..88062b1f664b 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -14,6 +14,7 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -22,6 +23,7 @@ import java.io.File; import java.io.IOException; +import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -30,25 +32,37 @@ import java.util.Date; import java.util.Iterator; import java.util.List; +import java.util.UUID; import java.util.stream.Collectors; import javax.validation.constraints.AssertTrue; +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.file.PathUtils; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.matcher.ResourcePolicyMatcher; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; import org.dspace.authorize.ResourcePolicy; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.Group; import org.dspace.eperson.GroupTest; import org.dspace.eperson.factory.EPersonServiceFactory; @@ -70,10 +84,10 @@ public class BulkAccessControlIT extends AbstractIntegrationTestWithDatabase { private Path tempDir; private String tempFilePath; - private Collection collection; private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + private SearchService searchService = SearchUtils.getSearchService(); @Before @Override @@ -81,19 +95,6 @@ public void setUp() throws Exception { super.setUp(); - context.turnOffAuthorisationSystem(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - collection = CollectionBuilder.createCollection(context, parentCommunity) - .withName("Collection") - .withEntityType("Publication") - .build(); - - context.restoreAuthSystemState(); - tempDir = Files.createTempDirectory("bulkAccessTest"); tempFilePath = tempDir + "/bulk-access.json"; } @@ -117,6 +118,10 @@ public void performBulkAccessWithoutRequiredParamTest() throws Exception { assertThat(testDSpaceRunnableHandler.getErrorMessages(), 
hasSize(1)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("A target uuid must be provided") + )); } @Test @@ -144,6 +149,10 @@ public void performBulkAccessWithEmptyJsonTest() throws Exception { assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Error parsing json file") + )); } @Test @@ -180,6 +189,32 @@ public void performBulkAccessWithWrongModeOfItemValueTest() throws Exception { assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("wrong value for item mode") + )); + + json = "{ \"item\": {\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("item mode node must be provided") + )); } @Test @@ -216,6 +251,32 @@ public void performBulkAccessWithWrongModeOfBitstreamValueTest() throws Exceptio assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("wrong value for bitstream mode") + )); + + json = "{ \"bitstream\": {\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("bitstream mode node must be provided") + )); } @Test @@ -252,7 +313,9 @@ public void performBulkAccessWithNotFoundAccessConditionNameTest() throws Except assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); - context.restoreAuthSystemState(); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("wrong access condition ") + )); } @Test @@ -291,6 +354,10 @@ public void performBulkAccessWithInvalidAccessConditionDateTest() throws Excepti assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("invalid access condition, The access condition embargo 
requires a start date.") + )); + String jsonTwo = "{ \"item\": {\n" + " \"mode\": \"add\",\n" + " \"accessConditions\": [\n" + @@ -310,6 +377,127 @@ public void performBulkAccessWithInvalidAccessConditionDateTest() throws Excepti assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("invalid access condition, The access condition lease requires an end date.") + )); + } + + @Test + public void performBulkAccessForCommunityItemsWithBitstreamConstraintsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community communityOne = CommunityBuilder.createCommunity(context) + .withName("community one") + .build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": [\"" + UUID.randomUUID() + "\"]\n" + + " },\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24T00:00:00.000Z\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", + "-u", communityOne.getID().toString(), + "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("constraint is not supported when uuid isn't an Item") + )); + } + + @Test + public void performBulkAccessForMultipleItemsWithBitstreamConstraintsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community communityOne = CommunityBuilder.createCommunity(context) + .withName("community one") + .build(); + + Community communityTwo = CommunityBuilder.createCommunity(context) + .withName("community two") + .build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": [\"" + UUID.randomUUID() + "\"]\n" + + " },\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24T00:00:00.000Z\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", + "-u", communityOne.getID().toString(), + "-u", communityTwo.getID().toString(), + "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("constraint isn't supported when multiple uuids are provided") + )); + } + + @Test + public void performBulkAccessWithAddModeAndEmptyAccessConditionsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + 
context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"add\"\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", "-u", parentCommunity.getID().toString(), "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("accessConditions of item must be provided with mode") + )); } @Test @@ -317,32 +505,32 @@ public void performBulkAccessWithValidJsonTest() throws Exception { context.turnOffAuthorisationSystem(); Community parentCommunity = CommunityBuilder.createCommunity(context) - .withName("parent community") - .build(); + .withName("parent community") + .build(); Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("sub community one") - .build(); + .withName("sub community one") + .build(); Community subCommunityTwo = CommunityBuilder.createSubCommunity(context, parentCommunity) .withName("sub community two") .build(); Community subCommunityThree = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("sub community two") - .build(); + .withName("sub community two") + .build(); Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) - .withName("collection one") - .build(); + .withName("collection one") + .build(); Collection collectionTwo = CollectionBuilder.createCollection(context, subCommunityTwo) - .withName("collection two") - .build(); + .withName("collection two") + .build(); Collection collectionThree = CollectionBuilder.createCollection(context, subCommunityThree) - .withName("collection three") - .build(); + .withName("collection three") + .build(); ItemBuilder.createItem(context, collectionOne).build(); @@ -412,6 +600,240 @@ public void performBulkAccessWithValidJsonTest() throws Exception { + } + + @Test + public void performBulkAccessWithReplaceModeAndEmptyAccessConditionsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Community subCommunityTwo = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community two") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, subCommunityTwo) + .withName("collection two") + .build(); + + for (int i = 0; i < 20 ; i++) { + ItemBuilder.createItem(context, collectionOne).build(); + } + + for (int i = 0; i < 5 ; i++) { + Item item = ItemBuilder.createItem(context, collectionTwo).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + 
BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream") + .build(); + } + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\"\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"replace\"\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-u", collectionTwo.getID().toString(), + "-f", tempFilePath + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); + List itemsOfSubCommTwo = findItems("location.comm:" + subCommunityTwo.getID()); + + assertThat(itemsOfSubCommOne, hasSize(10)); + assertThat(itemsOfSubCommTwo, hasSize(5)); + + assertThat(itemsOfSubCommOne.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(0L)); + + assertThat(itemsOfSubCommTwo.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(5L)); + + for (Item item : itemsOfSubCommOne) { + assertThat(item.getResourcePolicies(), hasSize(1)); + assertThat(item.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + } + + for (Item item : itemsOfSubCommTwo) { + assertThat(item.getResourcePolicies(), hasSize(1)); + assertThat(item.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + + List bitstreams = findAllBitstreams(item); + + for (Bitstream bitstream : bitstreams) { + assertThat(bitstream.getResourcePolicies(), hasSize(1)); + assertThat(bitstream.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + } + } + } + + @Test + public void performBulkAccessWithAddModeTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + for (int i = 0; i < 5 ; i++) { + + Item item = ItemBuilder.createItem(context, collectionOne).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream") + .build(); + } + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24T00:00:00.000Z\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": 
\"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"endDate\": \"2023-06-24T00:00:00.000Z\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-f", tempFilePath + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); + + assertThat(itemsOfSubCommOne, hasSize(5)); + + assertThat(itemsOfSubCommOne.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(5L)); + + for (Item item : itemsOfSubCommOne) { + assertThat(item.getResourcePolicies(), hasSize(3)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + //TODO add also the third resource policy embargo + )); + + List bitstreams = findAllBitstreams(item); + + for (Bitstream bitstream : bitstreams) { + assertThat(bitstream.getResourcePolicies(), hasSize(3)); + assertThat(bitstream.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + //TODO add also the third resource policy lease + )); + } + } + } + + private List findItems(String query) throws SearchServiceException { + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.setQuery(query); + + return searchService.search(context, discoverQuery) + .getIndexableObjects() + .stream() + .map(indexableObject -> + ((IndexableItem) indexableObject).getIndexedObject()) + .collect(Collectors.toList()); + } + + private List findAllBitstreams(Item item) { + return item.getBundles() + .stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .collect(Collectors.toList()); } private void matchItemsResourcePolicies( From 27f874a08b5bd0429fc3132671738c64cabdf095 Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 12 May 2023 10:58:17 +0300 Subject: [PATCH 198/686] refactoring and adding new test cases --- .../bulkaccesscontrol/BulkAccessControl.java | 21 +- .../BulkAccessControlIT.java | 237 ++++++++++++++---- 2 files changed, 209 insertions(+), 49 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index a9ca39f9fa06..bb44473ebc81 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -10,12 +10,11 @@ import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED; +import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME; import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; -import java.text.DateFormat; -import java.text.SimpleDateFormat; import java.util.Arrays; import 
java.util.Date; import java.util.Iterator; @@ -363,12 +362,13 @@ private void updateBitstreamsPolicies(Item item, AccessControl accessControl) { if (containsConstraints(accessControl.getBitstream())) { findMatchedBitstreams(item, accessControl.getBitstream().getConstraints().getUuid()) - item.getBundles(Constants.CONTENT_BUNDLE_NAME).stream() - .flatMap(bundle -> bundle.getBitstreams().stream()) - .filter(bitstream -> uuids == null || - uuids.size() == 0 || - uuids.contains(bitstream.getID().toString())) .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); + } else { + item.getBundles(CONTENT_BUNDLE_NAME).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .forEach(bitstream -> + updateBitstreamPolicies(bitstream, item, accessControl)); + } } private boolean containsConstraints(AccessConditionBitstream bitstream) { @@ -377,6 +377,13 @@ private boolean containsConstraints(AccessConditionBitstream bitstream) { isNotEmpty(bitstream.getConstraints().getUuid()); } + private List findMatchedBitstreams(Item item, List uuids) { + return item.getBundles(CONTENT_BUNDLE_NAME).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .filter(bitstream -> uuids.contains(bitstream.getID().toString())) + .collect(Collectors.toList()); + } + private void updateBitstreamPolicies(Bitstream bitstream, Item item, AccessControl accessControl) { if (REPLACE_MODE.equals(accessControl.getBitstream().getMode())) { diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java index 0e07284c68ae..21efe0420f3a 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -9,14 +9,12 @@ import static org.dspace.app.matcher.ResourcePolicyMatcher.matches; import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME; import static org.dspace.core.Constants.READ; import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -28,21 +26,17 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.text.SimpleDateFormat; -import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.UUID; import java.util.stream.Collectors; -import javax.validation.constraints.AssertTrue; - import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; import org.apache.commons.io.file.PathUtils; +import org.apache.commons.lang3.StringUtils; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.app.launcher.ScriptLauncher; -import org.dspace.app.matcher.ResourcePolicyMatcher; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; import org.dspace.authorize.ResourcePolicy; import org.dspace.builder.BitstreamBuilder; @@ -64,17 +58,11 @@ import org.dspace.discovery.SearchUtils; import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.Group; 
-import org.dspace.eperson.GroupTest; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.GroupService; -import org.dspace.matcher.DateMatcher; -import org.dspace.util.MultiFormatDateParser; -import org.dspace.utils.DSpace; import org.junit.After; -import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; /** * Basic integration testing for the Bulk Access conditions Feature{@link BulkAccessControl}. @@ -120,7 +108,7 @@ public void performBulkAccessWithoutRequiredParamTest() throws Exception { assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( - containsString("A target uuid must be provided") + containsString("A target uuid must be provided with at least on uuid") )); } @@ -401,7 +389,7 @@ public void performBulkAccessForCommunityItemsWithBitstreamConstraintsTest() thr " \"accessConditions\": [\n" + " {\n" + " \"name\": \"embargo\",\n" + - " \"startDate\": \"2024-06-24T00:00:00.000Z\"\n" + + " \"startDate\": \"2024-06-24\"\n" + " }\n" + " ]\n" + " }\n" + @@ -446,7 +434,7 @@ public void performBulkAccessForMultipleItemsWithBitstreamConstraintsTest() thro " \"accessConditions\": [\n" + " {\n" + " \"name\": \"embargo\",\n" + - " \"startDate\": \"2024-06-24T00:00:00.000Z\"\n" + + " \"startDate\": \"2024-06-24\"\n" + " }\n" + " ]\n" + " }\n" + @@ -470,6 +458,85 @@ public void performBulkAccessForMultipleItemsWithBitstreamConstraintsTest() thro )); } + @Test + public void performBulkAccessForSingleItemWithBitstreamConstraintsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community one") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamOneContent = "Dummy content one"; + Bitstream bitstreamOne; + try (InputStream is = IOUtils.toInputStream(bitstreamOneContent, CharEncoding.UTF_8)) { + bitstreamOne = BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream one") + .build(); + } + + String bitstreamTwoContent = "Dummy content of bitstream two"; + Bitstream bitstreamTwo; + try (InputStream is = IOUtils.toInputStream(bitstreamTwoContent, CharEncoding.UTF_8)) { + bitstreamTwo = BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream two") + .build(); + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": [\"" + bitstreamOne.getID().toString() + "\"]\n" + + " },\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", + "-u", item.getID().toString(), + "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), 
empty()); + + bitstreamOne = context.reloadEntity(bitstreamOne); + bitstreamTwo = context.reloadEntity(bitstreamTwo); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(bitstreamOne.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamOne.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(bitstreamTwo.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamTwo.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + } + @Test public void performBulkAccessWithAddModeAndEmptyAccessConditionsTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -517,10 +584,8 @@ public void performBulkAccessWithValidJsonTest() throws Exception { .build(); Community subCommunityThree = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("sub community two") + .withName("sub community three") .build(); - .withName("sub community three") - .build(); Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) .withName("collection one") @@ -754,7 +819,7 @@ public void performBulkAccessWithAddModeTest() throws Exception { " },\n" + " {\n" + " \"name\": \"embargo\",\n" + - " \"startDate\": \"2024-06-24T00:00:00.000Z\"\n" + + " \"startDate\": \"2024-06-24\"\n" + " }\n" + " ]\n" + " },\n" + @@ -766,7 +831,7 @@ public void performBulkAccessWithAddModeTest() throws Exception { " },\n" + " {\n" + " \"name\": \"lease\",\n" + - " \"endDate\": \"2023-06-24T00:00:00.000Z\"\n" + + " \"endDate\": \"2023-06-24\"\n" + " }\n" + " ]\n" + " }\n" + @@ -798,8 +863,8 @@ public void performBulkAccessWithAddModeTest() throws Exception { assertThat(item.getResourcePolicies(), hasSize(3)); assertThat(item.getResourcePolicies(), containsInAnyOrder( matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), - matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM) - //TODO add also the third resource policy embargo + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) )); List bitstreams = findAllBitstreams(item); @@ -808,8 +873,111 @@ public void performBulkAccessWithAddModeTest() throws Exception { assertThat(bitstream.getResourcePolicies(), hasSize(3)); assertThat(bitstream.getResourcePolicies(), containsInAnyOrder( matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), - matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM) - //TODO add also the third resource policy lease + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + } + } + } + + @Test + public void performBulkAccessWithReplaceModeTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + for (int i = 0; i < 3 ; i++) { + + Item item = ItemBuilder.createItem(context, collectionOne).build(); + + Bundle bundle = 
BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream") + .build(); + } + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"endDate\": \"2023-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-f", tempFilePath + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); + + assertThat(itemsOfSubCommOne, hasSize(3)); + + assertThat(itemsOfSubCommOne.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(3L)); + + for (Item item : itemsOfSubCommOne) { + assertThat(item.getResourcePolicies(), hasSize(2)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + List bitstreams = findAllBitstreams(item); + + for (Bitstream bitstream : bitstreams) { + assertThat(bitstream.getResourcePolicies(), hasSize(2)); + assertThat(bitstream.getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) )); } } @@ -830,27 +998,12 @@ private List findItems(String query) throws SearchServiceException { } private List findAllBitstreams(Item item) { - return item.getBundles() + return item.getBundles(CONTENT_BUNDLE_NAME) .stream() .flatMap(bundle -> bundle.getBitstreams().stream()) .collect(Collectors.toList()); } - private void matchItemsResourcePolicies( - Iterator itemIterator, Group group, String rpName, String rpType, String startDate, String endDate) { - while (itemIterator.hasNext()) { - Item item = itemIterator.next(); - matchItemResourcePolicies(item, group, rpName, rpType, startDate, endDate); - } - } - - private void matchItemResourcePolicies( - Item item, Group group, String rpName, String rpType, String startDate, String endDate) { - - assertThat(item.getResourcePolicies(), hasItem( - matches(READ, group, rpName, rpType, startDate, endDate, null))); - } - private void buildJsonFile(String json) throws IOException { File file = new File(tempDir + "/bulk-access.json"); Path path = Paths.get(file.getAbsolutePath()); From b24f121c767c1803882f1ae03d91d49e5a751eed Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Fri, 12 May 2023 10:11:09 +0200 Subject: [PATCH 199/686] 94299: checkstyle issue after 
main merge --- .../java/org/dspace/app/rest/BitstreamRestRepositoryIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index 0b65f3e4b9e4..2a1044c28a04 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -37,10 +37,10 @@ import org.dspace.app.rest.matcher.BitstreamMatcher; import org.dspace.app.rest.matcher.BundleMatcher; import org.dspace.app.rest.matcher.HalMatcher; +import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.RemoveOperation; import org.dspace.app.rest.model.patch.ReplaceOperation; -import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; import org.dspace.authorize.service.ResourcePolicyService; From 1cc6fb2f58018e73e0dc429c33a6d5fedbba5fcd Mon Sep 17 00:00:00 2001 From: damian Date: Fri, 12 May 2023 12:29:22 +0200 Subject: [PATCH 200/686] Item counters refactor. Removing unnecessary code. --- .../main/java/org/dspace/browse/ItemCounter.java | 10 ++++++++++ .../main/java/org/dspace/content/Collection.java | 2 +- .../org/dspace/content/CollectionServiceImpl.java | 6 +----- .../main/java/org/dspace/content/Community.java | 4 ++-- .../org/dspace/content/CommunityServiceImpl.java | 4 +--- .../java/org/dspace/content/dao/CollectionDAO.java | 3 --- .../dspace/content/dao/impl/CollectionDAOImpl.java | 14 ++++---------- .../app/rest/converter/CollectionConverter.java | 3 +-- .../app/rest/converter/CommunityConverter.java | 2 +- 9 files changed, 21 insertions(+), 27 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java index c9c140fb0b5b..27e29ad9fa1c 100644 --- a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java +++ b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java @@ -18,6 +18,7 @@ import org.dspace.core.Context; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.web.ContextUtil; /** * This class provides a standard interface to all item counting @@ -49,6 +50,8 @@ public class ItemCounter { */ private Context context; + private static ItemCounter instance; + protected ItemService itemService; protected ConfigurationService configurationService; @@ -65,6 +68,13 @@ public ItemCounter(Context context) throws ItemCountException { this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); } + public static ItemCounter getInstance() throws ItemCountException { + if (instance == null) { + instance = new ItemCounter(ContextUtil.obtainCurrentRequestContext()); + } + return instance; + } + /** * Get the count of the items in the given container. 
If the configuration * value webui.strengths.cache is equal to 'true' this will return the diff --git a/dspace-api/src/main/java/org/dspace/content/Collection.java b/dspace-api/src/main/java/org/dspace/content/Collection.java index 33fbb51e3fa7..6ee61abf5720 100644 --- a/dspace-api/src/main/java/org/dspace/content/Collection.java +++ b/dspace-api/src/main/java/org/dspace/content/Collection.java @@ -337,7 +337,7 @@ private CollectionService getCollectionService() { return collectionService; } - public int getAllItems() { + public int countArchivedItem() { try { return collectionService.countArchivedItem(this); } catch (ItemCountException e) { diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index 961ad4cbcf0e..092451296e8d 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -1051,10 +1051,6 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu @Override public int countArchivedItem(Collection collection) throws ItemCountException { - //TODO load ItemCounter on bean creation - Context context = new Context(); - return new ItemCounter(context).getCount(collection); + return ItemCounter.getInstance().getCount(collection); } - - } diff --git a/dspace-api/src/main/java/org/dspace/content/Community.java b/dspace-api/src/main/java/org/dspace/content/Community.java index 5b8baba8e498..d0477d116f3a 100644 --- a/dspace-api/src/main/java/org/dspace/content/Community.java +++ b/dspace-api/src/main/java/org/dspace/content/Community.java @@ -265,9 +265,9 @@ private CommunityService getCommunityService() { return communityService; } - public int getAllItems() { + public int countArchivedItem() { try { - return communityService.getArchivedItems(this); + return communityService.getArchivedItems(this); } catch (ItemCountException e) { throw new RuntimeException(e); } diff --git a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java index 7fe8dfd28951..496a06fd33a5 100644 --- a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java @@ -713,8 +713,6 @@ public int countTotal(Context context) throws SQLException { @Override public int getArchivedItems(Community community) throws ItemCountException { - //TODO load ItemCounter on bean creation - Context context = new Context(); - return new ItemCounter(context).getCount(community); + return ItemCounter.getInstance().getCount(community); } } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/CollectionDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/CollectionDAO.java index c8014fb52097..6bb65bbb46d8 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/CollectionDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/CollectionDAO.java @@ -10,7 +10,6 @@ import java.sql.SQLException; import java.util.List; import java.util.Map; -import java.util.UUID; import org.dspace.content.Collection; import org.dspace.content.Item; @@ -54,6 +53,4 @@ public List findAll(Context context, MetadataField order, Integer li int countRows(Context context) throws SQLException; List> getCollectionsWithBitstreamSizesTotal(Context context) throws SQLException; - - int countArchivedItems(Context context, UUID id) throws SQLException; 
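To illustrate the refactor above: callers no longer construct an ItemCounter per request; the service layer resolves the shared instance bound to the current request context. A minimal usage sketch, with names as in the diffs above:

    // Count in-archive items for a container; may throw ItemCountException,
    // which Collection.countArchivedItem() and Community.countArchivedItem()
    // wrap in a RuntimeException for the REST converters.
    int archivedInCollection = ItemCounter.getInstance().getCount(collection);
    int archivedInCommunity  = ItemCounter.getInstance().getCount(community);

Per the Javadoc above, when webui.strengths.cache is enabled getCount() returns the cached strength rather than counting on every call.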
} diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java index dc8bad7e19c4..c0ef6ea42fce 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java @@ -7,9 +7,11 @@ */ package org.dspace.content.dao.impl; -import java.math.BigInteger; import java.sql.SQLException; -import java.util.*; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; @@ -180,12 +182,4 @@ public List> getCollectionsWithBitstreamSizesTotal(C } return returnList; } - - @Override - public int countArchivedItems(Context context, UUID collectionId) throws SQLException { - return ((BigInteger) getHibernateSession(context) - .createNativeQuery("select count(*) from collection2item ci left join item i on i.uuid = ci.item_id where i.in_archive and uuid(ci.collection_id)=uuid(:collectionId)") - .setParameter("collectionId", collectionId.toString()) - .getSingleResult()).intValue(); - } } \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java index 9a5000852df5..836af81ef016 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java @@ -10,7 +10,6 @@ import org.dspace.app.rest.model.CollectionRest; import org.dspace.app.rest.projection.Projection; import org.dspace.content.Collection; -import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.springframework.stereotype.Component; @@ -27,7 +26,7 @@ public class CollectionConverter extends DSpaceObjectConverter Date: Fri, 12 May 2023 13:13:38 +0200 Subject: [PATCH 201/686] [CST-9634] Fix exception message --- .../org/dspace/app/rest/repository/ScriptRestRepository.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java index 2fc996a327cc..2942539f9f79 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java @@ -110,7 +110,7 @@ public ProcessRest startProcess(Context context, String scriptName, List Date: Fri, 12 May 2023 13:15:48 +0200 Subject: [PATCH 202/686] [CST-9634] add administrativeBulkAccess discovery configuration --- dspace/config/spring/api/discovery.xml | 143 +++++++++++++++++++++++++ 1 file changed, 143 insertions(+) diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 611e77b27b1c..04b2fa761845 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -75,6 +75,7 @@ + @@ -725,6 +726,147 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community 
+ -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + @@ -2524,6 +2666,7 @@ + placeholder.placeholder.placeholder From f844ca347b90feba504b10b6a1f15e05c0b81879 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Fri, 12 May 2023 13:24:28 +0100 Subject: [PATCH 203/686] Improved fix: override missing register method in VersionedDOIIdentifierProvider --- .../identifier/VersionedDOIIdentifierProvider.java | 9 +++++++++ .../app/rest/repository/IdentifierRestRepository.java | 3 +-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index a933e85d30dd..4374a2549ea9 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -153,6 +153,15 @@ public void register(Context context, DSpaceObject dso, String identifier) throw register(context, dso, identifier, this.filter); } + @Override + public String register(Context context, DSpaceObject dso, Filter filter) + throws IdentifierException { + String doi = mint(context, dso, filter); + register(context, dso, doi, filter); + + return doi; + } + @Override public void register(Context context, DSpaceObject dso, String identifier, Filter filter) throws IdentifierException { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java index b4a9688942c1..1be569d18e5d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java @@ -234,8 +234,7 @@ private IdentifierRest registerDOI(Context context, Item item) DOIIdentifierProvider doiIdentifierProvider = DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName("org.dspace.identifier.DOIIdentifierProvider", DOIIdentifierProvider.class); if (doiIdentifierProvider != null) { - String doiValue = doiIdentifierProvider.mint(context, item, new TrueFilter()); - doiIdentifierProvider.register(context, item, doiValue, new TrueFilter()); + String doiValue = doiIdentifierProvider.register(context, item, new TrueFilter()); identifierRest.setValue(doiValue); // Get new status DOI doi = doiService.findByDoi(context, doiValue); From 07fbe151260e5952c523bad7d8384c5c4792e052 Mon Sep 17 00:00:00 2001 From: Mohamed Saber Eskander Date: Fri, 12 May 2023 15:35:22 +0300 Subject: [PATCH 204/686] refactoring and added new IT class --- .../bulkaccesscontrol/BulkAccessControl.java | 37 +- .../BulkAccessControlScriptConfiguration.java | 7 +- .../BulkAccessControlIT.java | 2 - .../config/spring/rest/scripts.xml | 5 - .../BulkAccessControlScriptIT.java | 502 ++++++++++++++++++ dspace/config/spring/rest/scripts.xml | 5 - 6 files changed, 514 insertions(+), 44 deletions(-) create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java 
b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index bb44473ebc81..5c32484ec774 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -57,12 +57,8 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.scripts.DSpaceRunnable; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.submit.model.AccessConditionOption; import org.dspace.utils.DSpace; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Implementation of {@link DSpaceRunnable} to perform a bulk access control via json file. @@ -72,14 +68,10 @@ */ public class BulkAccessControl extends DSpaceRunnable> { - private static final Logger LOGGER = LoggerFactory.getLogger(BulkAccessControl.class); - private DSpaceObjectUtils dSpaceObjectUtils; private SearchService searchService; - private ConfigurationService configurationService; - private ItemService itemService; private String filename; @@ -107,7 +99,6 @@ public void setup() throws ParseException { this.searchService = SearchUtils.getSearchService(); this.itemService = ContentServiceFactory.getInstance().getItemService(); this.resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); - this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); this.bulkAccessConditionConfigurationService = new DSpace().getServiceManager().getServiceByName( "bulkAccessConditionConfigurationService", BulkAccessConditionConfigurationService.class); this.dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName( @@ -359,16 +350,15 @@ private void setItemPolicies(Item item, AccessControl accessControl) throws SQLE } private void updateBitstreamsPolicies(Item item, AccessControl accessControl) { - - if (containsConstraints(accessControl.getBitstream())) { - findMatchedBitstreams(item, accessControl.getBitstream().getConstraints().getUuid()) - .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); - } else { - item.getBundles(CONTENT_BUNDLE_NAME).stream() - .flatMap(bundle -> bundle.getBitstreams().stream()) - .forEach(bitstream -> - updateBitstreamPolicies(bitstream, item, accessControl)); - } + AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints(); + + item.getBundles(CONTENT_BUNDLE_NAME).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .filter(bitstream -> constraints == null || + constraints.getUuid() == null || + constraints.getUuid().size() == 0 || + constraints.getUuid().contains(bitstream.getID().toString())) + .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); } private boolean containsConstraints(AccessConditionBitstream bitstream) { @@ -377,13 +367,6 @@ private boolean containsConstraints(AccessConditionBitstream bitstream) { isNotEmpty(bitstream.getConstraints().getUuid()); } - private List findMatchedBitstreams(Item item, List uuids) { - return item.getBundles(CONTENT_BUNDLE_NAME).stream() - .flatMap(bundle -> bundle.getBitstreams().stream()) - .filter(bitstream -> uuids.contains(bitstream.getID().toString())) - .collect(Collectors.toList()); - } - private void updateBitstreamPolicies(Bitstream bitstream, Item item, AccessControl accessControl) { if 
(REPLACE_MODE.equals(accessControl.getBitstream().getMode())) { @@ -440,7 +423,7 @@ private void assignCurrentUserInContext() throws SQLException { } } - private void assignSpecialGroupsInContext() throws SQLException { + private void assignSpecialGroupsInContext() { for (UUID uuid : handler.getSpecialGroups()) { context.setSpecialGroup(uuid); } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java index edda394c8237..1fa6e12abd33 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java @@ -39,11 +39,8 @@ public boolean isAllowedToExecute(Context context, List dspaceObjectIDs = commandLineParameters.stream() diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java index 21efe0420f3a..c1d6a0db64ca 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -26,7 +26,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.Iterator; import java.util.List; import java.util.UUID; import java.util.stream.Collectors; @@ -34,7 +33,6 @@ import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; import org.apache.commons.io.file.PathUtils; -import org.apache.commons.lang3.StringUtils; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml index 79927833d26e..fd218aa77a8d 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml @@ -43,9 +43,4 @@ - - - - - diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java new file mode 100644 index 000000000000..da0f90ca97c7 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java @@ -0,0 +1,502 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static com.jayway.jsonpath.JsonPath.read; +import static org.dspace.app.matcher.ResourcePolicyMatcher.matches; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.Assert.assertTrue; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; 
+ +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter; +import org.dspace.app.rest.model.ParameterValueRest; +import org.dspace.app.rest.model.ProcessRest; +import org.dspace.app.rest.model.ScriptRest; +import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.test.AbstractEntityIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.ProcessBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.Process; +import org.dspace.scripts.service.ProcessService; +import org.junit.After; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.mock.web.MockMultipartFile; + +/** + * Basic integration testing for the bulk access Import feature via UI {@link BulkAccessControl}. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.com) + */ +public class BulkAccessControlScriptIT extends AbstractEntityIntegrationTest { + + @Autowired + private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; + + @Autowired + private GroupService groupService; + + @Autowired + private ProcessService processService; + + private final static String SCRIPTS_ENDPOINT = "/api/" + ScriptRest.CATEGORY + "/" + ScriptRest.PLURAL_NAME; + private final static String CURATE_SCRIPT_ENDPOINT = SCRIPTS_ENDPOINT + "/bulk-access-control/" + + ProcessRest.PLURAL_NAME; + + @After + @Override + public void destroy() throws Exception { + List processes = processService.findAll(context); + for (Process process : processes) { + ProcessBuilder.deleteProcess(process.getID()); + } + + super.destroy(); + } + + @Test + public void bulkAccessScriptWithAdminUserTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .withSubject("ExtraEntry") + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", item.getID().toString())); + parameters.add(new 
DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithAdminUserOfTargetCommunityTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .withAdminGroup(eperson) + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", parentCommunity.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithAdminUserOfTargetCollectionTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", collection.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + 
.map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithAdminUserOfTargetItemTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .withSubject("ExtraEntry") + .withAdminUser(eperson) + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", item.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithMultipleTargetUuidsWithAdminUserTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .build(); + + Item itemOne = ItemBuilder.createItem(context, collection) + .withTitle("Public item one") + .build(); + + Item itemTwo = ItemBuilder.createItem(context, collection) + .withTitle("Public item two") + .build(); + + Item itemThree = ItemBuilder.createItem(context, collection) + .withTitle("Public item three") + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " 
\"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", itemOne.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-u", itemTwo.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-u", itemThree.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + + itemOne = context.reloadEntity(itemOne); + itemTwo = context.reloadEntity(itemTwo); + itemThree = context.reloadEntity(itemThree); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(itemOne.getResourcePolicies(), hasSize(1)); + assertThat(itemTwo.getResourcePolicies(), hasSize(1)); + assertThat(itemThree.getResourcePolicies(), hasSize(1)); + + assertThat(itemOne.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + )); + + assertThat(itemTwo.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + )); + + assertThat(itemThree.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + )); + + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithoutTargetUUIDParameterTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .withAdminGroup(eperson) + .build(); + + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .param("properties", new ObjectMapper().writeValueAsString(List.of())) + ) + .andExpect(status().isInternalServerError()) + .andExpect(result -> assertTrue(result.getResolvedException() + .getMessage() + .contains("At least one target uuid must be provided"))); + } + + @Test + public void bulkAccessScriptWithNormalUserTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + LinkedList parameters = new LinkedList<>(); + + 
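+        // The '-u' parameter points the bulk access script at the target DSpace object (a community,
+        // collection or item uuid in these tests) and '-f' names the uploaded JSON file that carries
+        // the access conditions to apply via the multipart request built below.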
parameters.add(new DSpaceCommandLineParameter("-u", parentCommunity.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isForbidden()); + } + +} diff --git a/dspace/config/spring/rest/scripts.xml b/dspace/config/spring/rest/scripts.xml index eda8c579a89c..0fae31d552c1 100644 --- a/dspace/config/spring/rest/scripts.xml +++ b/dspace/config/spring/rest/scripts.xml @@ -64,9 +64,4 @@ - - - - - From 2e69ba7f63537636c0f78da04098a17beddb5ecb Mon Sep 17 00:00:00 2001 From: samuel Date: Wed, 23 Nov 2022 15:39:56 +0100 Subject: [PATCH 205/686] 96598: Test feedback pt. 3: submission - repair auto-refresh --- .../java/org/dspace/app/util/DCInputSet.java | 3 +- .../src/main/java/org/dspace/core/Utils.java | 4 ++- .../app/rest/submit/step/DescribeStep.java | 35 ++++++++++++++++++- .../step/validation/MetadataValidation.java | 5 ++- 4 files changed, 43 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java index bfd4270cf27f..06ef1f979cde 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java @@ -9,6 +9,7 @@ import java.util.List; import java.util.Map; +import java.util.Objects; import org.apache.commons.lang3.StringUtils; import org.dspace.core.Utils; @@ -121,7 +122,7 @@ public boolean isFieldPresent(String fieldName) { } } else { String fullName = field.getFieldName(); - if (fullName.equals(fieldName)) { + if (Objects.equals(fullName, fieldName)) { return true; } } diff --git a/dspace-api/src/main/java/org/dspace/core/Utils.java b/dspace-api/src/main/java/org/dspace/core/Utils.java index b9fff20c7674..2e30029cc530 100644 --- a/dspace-api/src/main/java/org/dspace/core/Utils.java +++ b/dspace-api/src/main/java/org/dspace/core/Utils.java @@ -415,7 +415,9 @@ public static String[] tokenize(String metadata) { * @return metadata field key */ public static String standardize(String schema, String element, String qualifier, String separator) { - if (StringUtils.isBlank(qualifier)) { + if (StringUtils.isBlank(element)) { + return null; + } else if (StringUtils.isBlank(qualifier)) { return schema + separator + element; } else { return schema + separator + element + separator + qualifier; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java index 10a96a557f6c..09ffa900c287 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java @@ -7,6 +7,9 @@ */ package org.dspace.app.rest.submit.step; +import static org.apache.commons.collections4.CollectionUtils.isEmpty; +import static org.apache.commons.lang3.StringUtils.isBlank; + import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; @@ -28,7 +31,11 @@ import 
org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.SubmissionStepConfig; import org.dspace.content.InProgressSubmission; +import org.dspace.content.Item; import org.dspace.content.MetadataValue; +import org.dspace.content.RelationshipMetadataService; +import org.dspace.content.RelationshipMetadataValue; +import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Context; import org.dspace.core.Utils; @@ -45,6 +52,9 @@ public class DescribeStep extends AbstractProcessingStep { private DCInputsReader inputReader; + private RelationshipMetadataService relationshipMetadataService = + ContentServiceFactory.getInstance().getRelationshipMetadataService(); + public DescribeStep() throws DCInputsReaderException { inputReader = new DCInputsReader(); } @@ -73,7 +83,10 @@ private void readField(InProgressSubmission obj, SubmissionStepConfig config, Da fieldsName.add(input.getFieldName() + "." + (String) qualifier); } } else { - fieldsName.add(input.getFieldName()); + String fieldName = input.getFieldName(); + if (fieldName != null) { + fieldsName.add(fieldName); + } } @@ -108,6 +121,26 @@ private void readField(InProgressSubmission obj, SubmissionStepConfig config, Da } } } + + if (input.isRelationshipField() && isBlank(input.getFieldName())) { + Item item = obj.getItem(); + String key = "relationship." + input.getRelationshipType(); + if (isEmpty(data.getMetadata().get(key))) { + data.getMetadata().put(key, new ArrayList<>()); + } + for (RelationshipMetadataValue metadataValue : + relationshipMetadataService.getRelationshipMetadata(item, true)) { + if (metadataValue.getMetadataField().getElement().equals(input.getRelationshipType())) { + MetadataValueRest dto = new MetadataValueRest(); + dto.setAuthority(metadataValue.getAuthority()); + dto.setConfidence(metadataValue.getConfidence()); + dto.setLanguage(metadataValue.getLanguage()); + dto.setPlace(metadataValue.getPlace()); + dto.setValue(metadataValue.getValue()); + data.getMetadata().get(key).add(dto); + } + } + } } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java index 9a0132674643..7a8249a77096 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java @@ -83,7 +83,10 @@ public List validate(SubmissionService submissionService, InProgressS } } else { - fieldsName.add(input.getFieldName()); + String fieldName = input.getFieldName(); + if (fieldName != null) { + fieldsName.add(fieldName); + } } for (String fieldName : fieldsName) { From f49b6f24ba14646eb5727116b80c9625f53cd6e0 Mon Sep 17 00:00:00 2001 From: samuel Date: Fri, 10 Feb 2023 12:39:09 +0100 Subject: [PATCH 206/686] 99221: Fix minor issues with Relationships in submission - repair relationship error --- dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java index 06ef1f979cde..3ab250bfc28e 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java @@ -120,6 +120,9 @@ public boolean isFieldPresent(String fieldName) { return true; } } + } else if 
(field.isRelationshipField() && + ("relation." + field.getRelationshipType()).equals(fieldName)) { + return true; } else { String fullName = field.getFieldName(); if (Objects.equals(fullName, fieldName)) { From c6b044222d356025d6051fcf676d1acf289c4d4a Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 12 May 2023 10:53:59 -0500 Subject: [PATCH 207/686] Remove setting to wait on two codecov builds --- .codecov.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.codecov.yml b/.codecov.yml index a628d33cbec5..326dd3e0b29e 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -4,13 +4,6 @@ # Can be validated via instructions at: # https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml -# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed -# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage -# needs to be merged across those builds -codecov: - notify: - after_n_builds: 2 - # Settings related to code coverage analysis coverage: status: From 95af5fff410b92ef9224baed50c6900e7ae88754 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Mon, 15 May 2023 08:40:57 +0100 Subject: [PATCH 208/686] VersionedDOIIdentifierProvider: add non-fatal check that dso is of type Item --- .../dspace/identifier/VersionedDOIIdentifierProvider.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index 4374a2549ea9..4ca186eaab49 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -156,7 +156,13 @@ public void register(Context context, DSpaceObject dso, String identifier) throw @Override public String register(Context context, DSpaceObject dso, Filter filter) throws IdentifierException { + if (!(dso instanceof Item)) { + // DOIs are currently assigned only to Items + return null; + } + String doi = mint(context, dso, filter); + register(context, dso, doi, filter); return doi; From 2f1d52bf3fbb40a7c02447a743a9843fe5ca927b Mon Sep 17 00:00:00 2001 From: Sascha Szott Date: Tue, 16 May 2023 09:09:17 +0200 Subject: [PATCH 209/686] removed eperson.subscription.onlynew --- dspace/config/dspace.cfg | 4 ---- 1 file changed, 4 deletions(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 65b1f951fad3..cb69f9841fb0 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -893,10 +893,6 @@ org.dspace.app.batchitemimport.work.dir = ${dspace.dir}/imports # default = false, (disabled) #org.dspace.content.Collection.findAuthorizedPerformanceOptimize = true -# For backwards compatibility, the subscription emails by default include any modified items -# uncomment the following entry for only new items to be emailed -# eperson.subscription.onlynew = true - # Identifier providers. 
# Following are configuration values for the EZID DOI provider, with appropriate From 84ac53c7c2839f901d73247d33de63e67e32258c Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Tue, 16 May 2023 13:29:45 +0200 Subject: [PATCH 210/686] Security issue fixed: request item token exposed --- .../repository/RequestItemRepository.java | 4 +- .../app/rest/RequestItemRepositoryIT.java | 61 +++---------------- 2 files changed, 12 insertions(+), 53 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index 198d0f765f87..6fe3452a7fb8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -207,8 +207,8 @@ public RequestItemRest createAndReturn(Context ctx) } catch (IOException | SQLException ex) { throw new RuntimeException("Request not sent.", ex); } - - return requestItemConverter.convert(ri, Projection.DEFAULT); + // #8636 - Security issue: Should not return RequestItemRest to avoid token exposure + return null; } // NOTICE: there is no service method for this -- requests are never deleted? diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index 4a90efb2c1bc..a9b2aaf3f279 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -221,32 +221,11 @@ public void testCreateAndReturnAuthenticated() // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); String authToken = getAuthToken(eperson.getEmail(), password); - AtomicReference requestTokenRef = new AtomicReference<>(); - try { - getClient(authToken) - .perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(eperson.getEmail())), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(eperson.getFullName())), - hasJsonPath("$.allfiles", is(true)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); - } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); - } + getClient(authToken) + .perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()); } /** @@ -273,31 +252,11 @@ public void testCreateAndReturnNotAuthenticated() // Create it and see if it was created correctly. 
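        // The endpoint no longer returns a RequestItemRest body, so the request below only asserts the
        // 201 Created status instead of inspecting the response JSON.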
ObjectMapper mapper = new ObjectMapper(); - AtomicReference requestTokenRef = new AtomicReference<>(); - try { - getClient().perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(RequestItemBuilder.REQ_EMAIL)), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(RequestItemBuilder.REQ_NAME)), - hasJsonPath("$.allfiles", is(false)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); - } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); - } + getClient().perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()); + } /** From 5f58e587f321989e277f2a37591cd9e939a9b5a2 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Tue, 16 May 2023 13:29:45 +0200 Subject: [PATCH 211/686] Security issue fixed: request item token exposed --- .../repository/RequestItemRepository.java | 4 +- .../app/rest/RequestItemRepositoryIT.java | 64 +++---------------- 2 files changed, 12 insertions(+), 56 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index 198d0f765f87..6fe3452a7fb8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -207,8 +207,8 @@ public RequestItemRest createAndReturn(Context ctx) } catch (IOException | SQLException ex) { throw new RuntimeException("Request not sent.", ex); } - - return requestItemConverter.convert(ri, Projection.DEFAULT); + // #8636 - Security issue: Should not return RequestItemRest to avoid token exposure + return null; } // NOTICE: there is no service method for this -- requests are never deleted? 
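With createAndReturn returning null, a successful POST to the item-request endpoint (URI_ROOT in the tests below) is acknowledged by an empty 201 Created response, so a client can no longer read the request token from the REST reply. A rough sketch of the changed exchange, as exercised by the adjusted integration tests:

    POST <URI_ROOT>  (RequestItemRest JSON)   ->   201 Created, empty body
    (previously: 201 Created with a RequestItemRest body that exposed "token")
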
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index 4a90efb2c1bc..2000b6e47e61 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -8,13 +8,10 @@ package org.dspace.app.rest; import static com.jayway.jsonpath.JsonPath.read; -import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.exparity.hamcrest.date.DateMatchers.within; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.text.IsEmptyString.emptyOrNullString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; @@ -221,32 +218,11 @@ public void testCreateAndReturnAuthenticated() // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); String authToken = getAuthToken(eperson.getEmail(), password); - AtomicReference requestTokenRef = new AtomicReference<>(); - try { - getClient(authToken) - .perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(eperson.getEmail())), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(eperson.getFullName())), - hasJsonPath("$.allfiles", is(true)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); - } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); - } + getClient(authToken) + .perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()); } /** @@ -273,31 +249,11 @@ public void testCreateAndReturnNotAuthenticated() // Create it and see if it was created correctly. 
ObjectMapper mapper = new ObjectMapper(); - AtomicReference requestTokenRef = new AtomicReference<>(); - try { - getClient().perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(RequestItemBuilder.REQ_EMAIL)), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(RequestItemBuilder.REQ_NAME)), - hasJsonPath("$.allfiles", is(false)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); - } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); - } + getClient().perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()); + } /** From 567e989202aeb55393892ca402ab26b85324f6da Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Wed, 17 May 2023 19:11:31 +0200 Subject: [PATCH 212/686] 8636 Request a Copy - integration test increased --- .../app/rest/RequestItemRepositoryIT.java | 68 ++++++++++++++++--- 1 file changed, 57 insertions(+), 11 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index 2000b6e47e61..937d35c091ea 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -13,6 +13,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; @@ -31,6 +32,7 @@ import java.sql.SQLException; import java.time.temporal.ChronoUnit; import java.util.Date; +import java.util.Iterator; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; @@ -218,12 +220,34 @@ public void testCreateAndReturnAuthenticated() // Create it and see if it was created correctly. 
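        // Since the response body is now empty, the created request is verified afterwards through
        // requestItemService.findByItem(context, item), matched on the requester's e-mail; the token
        // read back from the database is then used to clean the request up in the finally block.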
ObjectMapper mapper = new ObjectMapper(); String authToken = getAuthToken(eperson.getEmail(), password); - getClient(authToken) - .perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()); - } + try { + getClient(authToken) + .perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); + } finally { + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(eperson.getEmail())) { + // Verify request data + assertEquals(eperson.getFullName(), requestItem.getReqName()); + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(true, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); + } +} /** * Test of createAndReturn method, with an UNauthenticated user. @@ -249,11 +273,33 @@ public void testCreateAndReturnNotAuthenticated() // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); - getClient().perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()); - + try{ + getClient().perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); + }finally{ + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(RequestItemBuilder.REQ_EMAIL)) { + // Verify request data + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(RequestItemBuilder.REQ_NAME, requestItem.getReqName()); + assertEquals(bitstream.getID(), requestItem.getBitstream().getID()); + assertEquals(false, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); + } } /** From 9784c6f50dc984f6769f3774066bcd4be5eba53c Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Wed, 17 May 2023 19:26:50 +0200 Subject: [PATCH 213/686] 8636 - checkstyle --- .../java/org/dspace/app/rest/RequestItemRepositoryIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index 937d35c091ea..2fb7dbbc969d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -273,14 +273,14 @@ public void testCreateAndReturnNotAuthenticated() // Create it and see if it was created correctly. 
ObjectMapper mapper = new ObjectMapper(); - try{ + try { getClient().perform(post(URI_ROOT) .content(mapper.writeValueAsBytes(rir)) .contentType(contentType)) .andExpect(status().isCreated()) // verify the body is empty .andExpect(jsonPath("$").doesNotExist()); - }finally{ + } finally { Iterator itemRequests = requestItemService.findByItem(context, item); String token = null; for (Iterator it = itemRequests; it.hasNext();) { From 564f9fdcf6d18514cb416773faf9596d63839e2a Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Fri, 19 May 2023 16:43:05 +0300 Subject: [PATCH 214/686] pom.xml: bump a handful of dependencies All minor and patch versions with no breaking changes: - pdfbox 2.0.28 - See: https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310760&version=12352284 - log4j 2.20.0 - See: https://logging.apache.org/log4j/2.x/release-notes/2.20.0.html - rome 1.19.0 - See: https://github.com/rometools/rome/releases/tag/1.19.0 - slf4j 1.7.36 - https://www.slf4j.org/news.html - solr-client 8.11.2 - https://lucene.apache.org/core/8_11_2/changes/Changes.html - hibernate 5.6.15.Final - See: https://hibernate.org/orm/releases/5.6/#whats-new - hibernate-validator 6.2.5.Final - See: https://hibernate.org/validator/documentation/migration-guide/#6-2-x - postgresql JDBC driver 42.6.0 --- pom.xml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 7e09188027db..828854c0cab1 100644 --- a/pom.xml +++ b/pom.xml @@ -22,10 +22,10 @@ 5.3.27 2.7.11 5.7.8 - 5.6.5.Final - 6.0.23.Final - 42.4.3 - 8.11.1 + 5.6.15.Final + 6.2.5.Final + 42.6.0 + 8.11.2 3.4.0 2.10.0 @@ -38,10 +38,10 @@ 1.1.0 9.4.51.v20230217 - 2.17.1 - 2.0.27 - 1.18.0 - 1.7.25 + 2.20.0 + 2.0.28 + 1.19.0 + 1.7.36 2.3.0 1.70 From d7d7f7c37034f6c571c1a61d5ba9afdc71d91d0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Fri, 19 May 2023 22:44:07 +0100 Subject: [PATCH 215/686] support for entity type for collection at input submissions --- .../app/util/SubmissionConfigReader.java | 26 ++++++++++++++++--- .../dspace/content/CollectionServiceImpl.java | 22 ++++++++++++++++ .../content/service/CollectionService.java | 15 +++++++++++ dspace/config/item-submission.dtd | 3 ++- dspace/config/item-submission.xml | 20 ++++++++++++++ 5 files changed, 82 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 21208483583e..7132c1e934e5 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -22,6 +22,8 @@ import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; import org.dspace.core.Context; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.factory.DSpaceServicesFactory; @@ -104,6 +106,9 @@ public class SubmissionConfigReader { * always reload from scratch) */ private SubmissionConfig lastSubmissionConfig = null; + + protected static final CollectionService collectionService + = ContentServiceFactory.getInstance().getCollectionService(); /** * Load Submission Configuration from the @@ -335,17 +340,22 @@ private void doNodes(Node n) throws SAXException, SubmissionConfigReaderExceptio * by the collection handle. 
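 * A name-map entry may now alternatively carry a collection-entity-type attribute; in that case every
 * collection of that entity type (looked up through the CollectionService) is mapped to the given
 * submission definition, using putIfAbsent so explicit collection-handle mappings are not overwritten.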
*/ private void processMap(Node e) throws SAXException { + // create a context + Context context = new Context(); + NodeList nl = e.getChildNodes(); int len = nl.getLength(); for (int i = 0; i < len; i++) { Node nd = nl.item(i); if (nd.getNodeName().equals("name-map")) { String id = getAttribute(nd, "collection-handle"); + String entityType = getAttribute(nd, "collection-entity-type"); String value = getAttribute(nd, "submission-name"); String content = getValue(nd); - if (id == null) { + if (id == null && entityType == null) { throw new SAXException( - "name-map element is missing collection-handle attribute in 'item-submission.xml'"); + "name-map element is missing collection-handle or collection-entity-type attribute " + + "in 'item-submission.xml'"); } if (value == null) { throw new SAXException( @@ -355,7 +365,17 @@ private void processMap(Node e) throws SAXException { throw new SAXException( "name-map element has content in 'item-submission.xml', it should be empty."); } - collectionToSubmissionConfig.put(id, value); + if (id != null) { + collectionToSubmissionConfig.put(id, value); + + } else { + // get all collections for this entity-type + List collections = collectionService.findAllCollectionsByEntityType( context, + entityType); + for (Collection collection : collections) { + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + } + } } // ignore any child node that isn't a "name-map" } } diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index ddfd38694f23..ef89009ebfb2 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -18,6 +18,7 @@ import java.util.Map; import java.util.MissingResourceException; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.UUID; @@ -1047,4 +1048,25 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu return (int) resp.getTotalSearchResults(); } + @Override + @SuppressWarnings("rawtypes") + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException { + List collectionList = new ArrayList<>(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setMaxResults(0); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + entityType); + + DiscoverResult discoverResult = searchService.search(context, discoverQuery); + List solrIndexableObjects = discoverResult.getIndexableObjects(); + + for (IndexableObject solrCollection : solrIndexableObjects) { + Collection c = ((IndexableCollection) solrCollection).getIndexedObject(); + collectionList.add(c); + } + return collectionList; + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index a5b2b7d8d891..82d8b24fb706 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -455,4 +455,19 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType) throws SQLException, SearchServiceException; + + /** + * Returns a list of 
all collections for a specific entity type. + * NOTE: for better performance, this method retrieves its results from an index (cache) + * and does not query the database directly. + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved." + * + * @param context DSpace Context + * @param entityType limit the returned collection to those related to given entity type + * @return list of collections found + * @throws SearchServiceException if search error + */ + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException; } diff --git a/dspace/config/item-submission.dtd b/dspace/config/item-submission.dtd index 6490dac62c19..dd1afa0dd02d 100644 --- a/dspace/config/item-submission.dtd +++ b/dspace/config/item-submission.dtd @@ -11,7 +11,8 @@ diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index 2ab26dcf57df..f937a5fd9a78 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -47,6 +47,26 @@ --> + + + From 0409373b617a4f3c0b8e76c50133b14c6e016718 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Sat, 20 May 2023 08:07:05 +0100 Subject: [PATCH 216/686] handling exceptions --- .../java/org/dspace/app/util/SubmissionConfigReader.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 7132c1e934e5..82ebbd0d0d25 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -25,6 +25,7 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.factory.DSpaceServicesFactory; import org.w3c.dom.Document; @@ -157,6 +158,9 @@ private void buildInputs(String fileName) throws SubmissionConfigReaderException } catch (FactoryConfigurationError fe) { throw new SubmissionConfigReaderException( "Cannot create Item Submission Configuration parser", fe); + } catch (SearchServiceException se) { + throw new SubmissionConfigReaderException( + "Cannot perform a discovery search for Item Submission Configuration", se); } catch (Exception e) { throw new SubmissionConfigReaderException( "Error creating Item Submission Configuration: " + e); @@ -292,7 +296,7 @@ public SubmissionStepConfig getStepConfig(String stepID) * should correspond to the collection-form maps, the form definitions, and * the display/storage word pairs. */ - private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException { + private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException { if (n == null) { return; } @@ -339,7 +343,7 @@ private void doNodes(Node n) throws SAXException, SubmissionConfigReaderExceptio * the collection handle and item submission name, put name in hashmap keyed * by the collection handle. 
*/ - private void processMap(Node e) throws SAXException { + private void processMap(Node e) throws SAXException, SearchServiceException { // create a context Context context = new Context(); From 687b6216dfabb5e7d4069c30e7aeb2cecc73b602 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Sat, 20 May 2023 10:41:51 +0100 Subject: [PATCH 217/686] checkstyle violations fixing --- .../java/org/dspace/app/util/SubmissionConfigReader.java | 6 +++++- .../main/java/org/dspace/content/CollectionServiceImpl.java | 1 - .../java/org/dspace/content/service/CollectionService.java | 1 - 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 82ebbd0d0d25..91be9a08e605 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -107,7 +107,11 @@ public class SubmissionConfigReader { * always reload from scratch) */ private SubmissionConfig lastSubmissionConfig = null; - + + /** + * Collection Service instance, needed to interact with collection's + * stored data + */ protected static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index ef89009ebfb2..2166a947386d 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -18,7 +18,6 @@ import java.util.Map; import java.util.MissingResourceException; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.UUID; diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index 82d8b24fb706..9ded79fadad0 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -455,7 +455,6 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType) throws SQLException, SearchServiceException; - /** * Returns a list of all collections for a specific entity type. 
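 * (The lookup is a Discovery query filtered on dspace.entity.type; see
 * CollectionServiceImpl#findAllCollectionsByEntityType.)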
* NOTE: for better performance, this method retrieves its results from an index (cache) From 6fa9e74d9006b260a7ca5edc40d734219b487682 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Sat, 20 May 2023 11:35:27 +0100 Subject: [PATCH 218/686] checkstyle violations fixing --- .../main/java/org/dspace/app/util/SubmissionConfigReader.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 91be9a08e605..d394c60e41f6 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -109,7 +109,7 @@ public class SubmissionConfigReader { private SubmissionConfig lastSubmissionConfig = null; /** - * Collection Service instance, needed to interact with collection's + * Collection Service instance, needed to interact with collection's * stored data */ protected static final CollectionService collectionService @@ -381,7 +381,7 @@ private void processMap(Node e) throws SAXException, SearchServiceException { List collections = collectionService.findAllCollectionsByEntityType( context, entityType); for (Collection collection : collections) { - collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); } } } // ignore any child node that isn't a "name-map" From 2ef268380fedec1aaba1acff291ab04e425eab84 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Sat, 20 May 2023 12:56:38 +1200 Subject: [PATCH 219/686] Unlink DOI from item on deletion even if no provider is configured --- .../java/org/dspace/content/ItemServiceImpl.java | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index a290cb0d995f..f86b6690ad29 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -64,7 +64,9 @@ import org.dspace.event.Event; import org.dspace.harvest.HarvestedItem; import org.dspace.harvest.service.HarvestedItemService; +import org.dspace.identifier.DOI; import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.service.DOIService; import org.dspace.identifier.service.IdentifierService; import org.dspace.orcid.OrcidHistory; import org.dspace.orcid.OrcidQueue; @@ -123,6 +125,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) protected IdentifierService identifierService; @Autowired(required = true) + protected DOIService doiService; + @Autowired(required = true) protected VersioningService versioningService; @Autowired(required = true) protected HarvestedItemService harvestedItemService; @@ -786,6 +790,16 @@ protected void rawDelete(Context context, Item item) throws AuthorizeException, // Remove any Handle handleService.unbindHandle(context, item); + // Delete a DOI if linked to the item. + // If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid, + // hibernate will throw a foreign constraint exception. + // Here we use the DOI service directly as it is able to manage DOIs even without any configured + // consumer or provider. 
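+        // Only the link between the DOI row and this item is cleared (setDSpaceObject(null));
+        // the DOI record itself is kept, which is enough to avoid the constraint violation on delete.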
+ DOI doi = doiService.findDOIByDSpaceObject(context, item); + if (doi != null) { + doi.setDSpaceObject(null); + } + // remove version attached to the item removeVersion(context, item); From 208cac08d561de5992812fe2aaaf92929fedd4b4 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Sun, 21 May 2023 15:42:56 +1200 Subject: [PATCH 220/686] modifying unit tests as per CI feedback, stubbings now unnecessary --- .../src/test/java/org/dspace/content/CollectionTest.java | 3 --- dspace-api/src/test/java/org/dspace/content/ItemTest.java | 2 -- 2 files changed, 5 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java index 1548ebcae0d8..13d037abf823 100644 --- a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java +++ b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java @@ -725,9 +725,6 @@ public void testRemoveItemAuth() throws Exception { // Allow Item REMOVE perms doNothing().when(authorizeServiceSpy) .authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE)); - // Allow Item WRITE perms (Needed to remove identifiers, e.g. DOI, before Item deletion) - doNothing().when(authorizeServiceSpy) - .authorizeAction(any(Context.class), any(Item.class), eq(Constants.WRITE)); // create & add item first context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/content/ItemTest.java b/dspace-api/src/test/java/org/dspace/content/ItemTest.java index 15e425e23a2a..bae6ce9e1d90 100644 --- a/dspace-api/src/test/java/org/dspace/content/ItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/ItemTest.java @@ -1189,8 +1189,6 @@ public void testDeleteAuth() throws Exception { doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true); // Allow Item DELETE perms doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE); - // Allow Item WRITE perms (required to first delete identifiers) - doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); UUID id = item.getID(); itemService.delete(context, item); From 2b3af3a126ae9b5f523660cb544d7cd0a6192f9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Mon, 22 May 2023 08:42:33 +0100 Subject: [PATCH 221/686] checkstyle violations fixing --- .../main/java/org/dspace/app/util/SubmissionConfigReader.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index d394c60e41f6..9ed539ee4f48 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -663,4 +663,4 @@ public List getCollectionsBySubmissionConfig(Context context, String } return results; } -} +} \ No newline at end of file From 6018a2b7be85c3593e30c66bfedf1a95d9363439 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Mon, 22 May 2023 11:50:59 +0300 Subject: [PATCH 222/686] pom.xml: bump tika from 2.3.0 to 2.5.0 A handful of bug fixes, improvements to PDF parsing, and updates to dependencies. This is the highest we can go right now without hitting dependency convergence issues related to bouncycastle. 
See: https://github.com/apache/tika/blob/2.5.0/CHANGES.txt --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 828854c0cab1..ad368e318072 100644 --- a/pom.xml +++ b/pom.xml @@ -42,7 +42,7 @@ 2.0.28 1.19.0 1.7.36 - 2.3.0 + 2.5.0 1.70 From 8ddf4702af8a13f0527ede69109d2de993463c72 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Mon, 22 May 2023 11:09:43 +0200 Subject: [PATCH 223/686] [DURACOM-127] check if versioning is enabled for versioned identifier providers --- .../VersionedDOIIdentifierProvider.java | 16 +++++++++++++++- .../VersionedHandleIdentifierProvider.java | 16 +++++++++++++++- ...eIdentifierProviderWithCanonicalHandles.java | 17 ++++++++++++++++- 3 files changed, 46 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index 4ca186eaab49..e5a90907c7b6 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -27,13 +27,14 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; /** * @author Marsa Haoua * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ -public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { +public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implements InitializingBean { /** * log4j category */ @@ -49,6 +50,19 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { @Autowired(required = true) protected VersionHistoryService versionHistoryService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedDOIIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public String mint(Context context, DSpaceObject dso) throws IdentifierException { return mint(context, dso, this.filter); diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java index b29d47f406c2..4f9efd220695 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java @@ -35,6 +35,7 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -45,7 +46,7 @@ * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ @Component -public class VersionedHandleIdentifierProvider extends IdentifierProvider { +public class VersionedHandleIdentifierProvider extends 
IdentifierProvider implements InitializingBean { /** * log4j category */ @@ -71,6 +72,19 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider { @Autowired(required = true) protected ContentServiceFactory contentServiceFactory; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 0fac326ca1a7..78ad6b7b79bb 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -30,6 +30,7 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -39,7 +40,8 @@ * @author Ben Bosman (ben at atmire dot com) */ @Component -public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider { +public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider + implements InitializingBean { /** * log4j category */ @@ -65,6 +67,19 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident @Autowired(required = true) private ItemService itemService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); From fc2589464f7e2471aff52b252c83fb4b6e7eebdf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Mon, 22 May 2023 12:57:56 +0100 Subject: [PATCH 224/686] checkstyle violations fixing and remove unnecessary max rows limit --- .../main/java/org/dspace/app/util/SubmissionConfigReader.java | 2 +- .../src/main/java/org/dspace/content/CollectionServiceImpl.java | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 9ed539ee4f48..0f144fd69f46 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -381,7 +381,7 @@ private void processMap(Node e) throws 
SAXException, SearchServiceException { List collections = collectionService.findAllCollectionsByEntityType( context, entityType); for (Collection collection : collections) { - collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); } } } // ignore any child node that isn't a "name-map" diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index 2166a947386d..5b70cc4ec086 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -1054,7 +1054,6 @@ public List findAllCollectionsByEntityType(Context context, String e List collectionList = new ArrayList<>(); DiscoverQuery discoverQuery = new DiscoverQuery(); - discoverQuery.setMaxResults(0); discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); discoverQuery.addFilterQueries("dspace.entity.type:" + entityType); From cbfed3e2183dc4fa7d9c0e8fd253b47e28364f79 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Mon, 22 May 2023 15:37:48 +0200 Subject: [PATCH 225/686] [DURACOM-148] add crossref metadata processor to manage the date issued field --- .../CrossRefDateMetadataProcessor.java | 79 +++++++++++++++++++ ...CrossRefImportMetadataSourceServiceIT.java | 4 +- .../spring/api/crossref-integration.xml | 5 +- 3 files changed, 85 insertions(+), 3 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java new file mode 100644 index 000000000000..c7d806cdf4cd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; +import org.joda.time.LocalDate; + +/** + * This class is used for CrossRef's Live-Import to extract + * issued attribute. + * Beans are configured in the crossref-integration.xml file. 
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class CrossRefDateMetadataProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator dates = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (dates.hasNext()) { + JsonNode date = dates.next(); + LocalDate issuedDate = null; + SimpleDateFormat issuedDateFormat = null; + if (date.has(0) && date.has(1) && date.has(2)) { + issuedDate = new LocalDate( + date.get(0).numberValue().intValue(), + date.get(1).numberValue().intValue(), + date.get(2).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + } else if (date.has(0) && date.has(1)) { + issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()) + .withMonthOfYear(date.get(1).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM"); + } else if (date.has(0)) { + issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy"); + } + values.add(issuedDateFormat.format(issuedDate.toDate())); + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java index 9a0d39225c3d..11fe58ac1d2e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java @@ -146,7 +146,7 @@ private ArrayList getRecords() { + " Medical College of Prevention of Iodine Deficiency Diseases"); MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "L.V. Senyuk"); MetadatumDTO type = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.184"); @@ -172,7 +172,7 @@ private ArrayList getRecords() { "Ischemic Heart Disease and Role of Nurse of Cardiology Department"); MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "K. І. 
Kozak"); MetadatumDTO type2 = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof2 = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi2 = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.105"); diff --git a/dspace/config/spring/api/crossref-integration.xml b/dspace/config/spring/api/crossref-integration.xml index e01b613833e4..5d67c17626e2 100644 --- a/dspace/config/spring/api/crossref-integration.xml +++ b/dspace/config/spring/api/crossref-integration.xml @@ -69,8 +69,11 @@ - + + + + From c3dd1f8489a37fc177769ecd50a492397ab72502 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 22 May 2023 09:23:21 -0500 Subject: [PATCH 226/686] Update to latest Spring Boot --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 828854c0cab1..ef6227e6ff2d 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 11 5.3.27 - 2.7.11 + 2.7.12 5.7.8 5.6.15.Final 6.2.5.Final From 7ab5f3b48925449cae681c05f305dffe86f970e6 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 17 May 2023 22:58:15 +0300 Subject: [PATCH 227/686] dspace-api: fix typo in ImageMagickThumbnailFilter --- .../org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index d16243e3e3bc..9fa310120555 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -208,7 +208,7 @@ public boolean preProcessBitstream(Context c, Item item, Bitstream source, boole if (description != null) { if (replaceRegex.matcher(description).matches()) { if (verbose) { - System.out.format("%s %s matches pattern and is replacable.%n", + System.out.format("%s %s matches pattern and is replaceable.%n", description, nsrc); } continue; From c37622cb3f094d4f1b472db2a588e096422b0b62 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 17 May 2023 23:00:36 +0300 Subject: [PATCH 228/686] dspace-api: improve logging in ImageMagickThumbnailFilter Instead of logging the name of the source bitstream, we should be logging the name of the actual thumbnail bitstream that is being considered for replacement. For example, instead of this: IM Thumbnail manual.pdf matches pattern and is replaceable. ... the message should read: IM Thumbnail manual.pdf.jpg matches pattern and is replaceable. This message is already confusing enough, but this will help. 
--- .../dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index 9fa310120555..b7c8acc6fdc5 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -209,14 +209,14 @@ public boolean preProcessBitstream(Context c, Item item, Bitstream source, boole if (replaceRegex.matcher(description).matches()) { if (verbose) { System.out.format("%s %s matches pattern and is replaceable.%n", - description, nsrc); + description, n); } continue; } if (description.equals(getDescription())) { if (verbose) { System.out.format("%s %s is replaceable.%n", - getDescription(), nsrc); + getDescription(), n); } continue; } From 7dbfa9a3c5e3c85daf2e83d5cbbeb547be93376f Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 17 May 2023 23:10:29 +0300 Subject: [PATCH 229/686] dspace-api: minor logging fix in ImageMagickThumbnailFilter Minor standardization to logging (unneccessary capitalization and excessive spaces). --- .../org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index b7c8acc6fdc5..383fc50868e5 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -221,7 +221,7 @@ public boolean preProcessBitstream(Context c, Item item, Bitstream source, boole continue; } } - System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n", + System.out.format("Custom thumbnail exists for %s for item %s. Thumbnail will not be generated.%n", nsrc, item.getHandle()); return false; } From a4105bdcb0a07f4d4b573a36010d39fa4a576552 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 17 May 2023 23:20:34 +0300 Subject: [PATCH 230/686] dspace-api: simplify logic in ImageMagickThumbnailFilter There is no point passing a page parameter here, with a default of 0 no less, because we will *always* use the first page of the PDF to generate the thumbnail. No other filters use this function and the page parameter is not configurable so we should just hard code it. 
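For reference, the bracket suffix is ImageMagick's standard page/frame selector, so hard-coding s = "[0]" below means op.addImage(f.getAbsolutePath() + s) always reads just the first page of the PDF.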
--- .../dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java | 2 +- .../dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java index 467303c3cafd..e03e770f7665 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java @@ -22,7 +22,7 @@ public InputStream getDestinationStream(Item currentItem, InputStream source, bo File f2 = null; File f3 = null; try { - f2 = getImageFile(f, 0, verbose); + f2 = getImageFile(f, verbose); f3 = getThumbnailFile(f2, verbose); byte[] bytes = Files.readAllBytes(f3.toPath()); return new ByteArrayInputStream(bytes); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index 383fc50868e5..d156fdbc1a2a 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -116,7 +116,7 @@ public File getThumbnailFile(File f, boolean verbose) return f2; } - public File getImageFile(File f, int page, boolean verbose) + public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); f2.deleteOnExit(); @@ -155,7 +155,7 @@ public File getImageFile(File f, int page, boolean verbose) op.define("pdf:use-cropbox=true"); } - String s = "[" + page + "]"; + String s = "[0]"; op.addImage(f.getAbsolutePath() + s); if (configurationService.getBooleanProperty(PRE + ".flatten", true)) { op.flatten(); From 7ebb5290fc6a1c65b7ffc3f653e43932d8b79f02 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 18 May 2023 09:47:06 +0300 Subject: [PATCH 231/686] dspace-api: comment ImageMagick filters Add some comments to document the functionality of the ImageMagick thumbnail filters. This will help others understand it later when we need to re-factor them. 
--- .../app/mediafilter/ImageMagickPdfThumbnailFilter.java | 2 ++ .../dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 7 +++++++ 2 files changed, 9 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java index e03e770f7665..afe1bb3d75df 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java @@ -22,7 +22,9 @@ public InputStream getDestinationStream(Item currentItem, InputStream source, bo File f2 = null; File f3 = null; try { + // Step 1: get an image from our PDF file, with PDF-specific processing options f2 = getImageFile(f, verbose); + // Step 2: use the image above to create the final resized and rotated thumbnail f3 = getThumbnailFile(f2, verbose); byte[] bytes = Files.readAllBytes(f3.toPath()); return new ByteArrayInputStream(bytes); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index d156fdbc1a2a..450882ed1717 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -116,6 +116,13 @@ public File getThumbnailFile(File f, boolean verbose) return f2; } + /** + * Return an image from a bitstream with specific processing options for + * PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to + * generate an intermediate image file for use with getThumbnailFile. It + * is unfortunate that this means we convert from PDF to JPEG to JPEG, + * which incurs generation loss. + */ public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); From 5357923d281d796cb72733ff9fa3244b3ef55490 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 18 May 2023 14:18:00 +0300 Subject: [PATCH 232/686] dspace-api: avoid generation loss in ImageMagickThumbnailFilter When filtering PDF bitstreams, the ImageMagickThumbnailFilter first creates an intermediate JPEG and then a "thumbnail" JPEG. These two operations are both lossy. The ImageMagick usage guide warns against doing that: > JPEG losses (sic) information, degrading images when saved. > Use some other format for intermediate images during processing. > Only use JPEG format, for the final image, not for further processing. As our current filter architecture requires writing a temporary file we must choose one of the following lossless formats to use for the intermediate: PNG, TIFF, or MIFF. MIFF is ImageMagick's own internal format and is much faster to write than PNG. By eliminating the first lossy conversion we gain 1.1% points on the ssimulacra2 (v2.1) scoring scale of visual quality. 
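As an illustration of the resulting pipeline, here is a minimal sketch only (not the filter itself): it assumes im4java and an ImageMagick install with a PDF delegate, uses hypothetical class and file names, and hard-codes a 160px width instead of the configured thumbnail size.

    import java.io.File;
    import org.im4java.core.ConvertCmd;
    import org.im4java.core.IMOperation;

    public class LosslessIntermediateSketch {
        public static void main(String[] args) throws Exception {
            File pdf = new File(args[0]);

            // Step 1: render the first PDF page to a lossless MIFF intermediate
            // (ImageMagick picks the output format from the ".miff" extension).
            File miff = new File(pdf.getParentFile(), pdf.getName() + ".miff");
            IMOperation render = new IMOperation();
            render.addImage(pdf.getAbsolutePath() + "[0]");
            render.addImage(miff.getAbsolutePath());
            new ConvertCmd().run(render);

            // Step 2: scale the intermediate; the lossy JPEG encode happens
            // only once, for the final thumbnail.
            File jpg = new File(pdf.getParentFile(), pdf.getName() + ".jpg");
            IMOperation thumb = new IMOperation();
            thumb.addImage(miff.getAbsolutePath());
            thumb.thumbnail(160);
            thumb.addImage(jpg.getAbsolutePath());
            new ConvertCmd().run(thumb);
        }
    }
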
See: https://imagemagick.org/Usage/formats/#jpg --- .../app/mediafilter/ImageMagickThumbnailFilter.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index 450882ed1717..408982d157e5 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -119,13 +119,14 @@ public File getThumbnailFile(File f, boolean verbose) /** * Return an image from a bitstream with specific processing options for * PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to - * generate an intermediate image file for use with getThumbnailFile. It - * is unfortunate that this means we convert from PDF to JPEG to JPEG, - * which incurs generation loss. + * generate an intermediate image file for use with getThumbnailFile. */ public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { - File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + // Writing an intermediate file to disk is inefficient, but since we're + // doing it anyway, we should use a lossless format. IM's internal MIFF + // is lossless like PNG and TIFF, but much faster. + File f2 = new File(f.getParentFile(), f.getName() + ".miff"); f2.deleteOnExit(); ConvertCmd cmd = new ConvertCmd(); IMOperation op = new IMOperation(); From 05f1714b0566f2091590c1981e4a128db03ab428 Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Mon, 10 Apr 2023 11:36:47 +0200 Subject: [PATCH 233/686] Replace occurrences of DSpace with the dspace.name variable --- dspace/config/dspace.cfg | 1 + dspace/config/emails/change_password | 12 ++++-------- dspace/config/emails/doi_maintenance_error | 4 +++- dspace/config/emails/export_error | 7 ++----- dspace/config/emails/export_success | 6 ++---- dspace/config/emails/feedback | 3 ++- dspace/config/emails/flowtask_notify | 4 ++-- dspace/config/emails/harvesting_error | 4 +++- dspace/config/emails/internal_error | 2 +- dspace/config/emails/register | 10 +++------- dspace/config/emails/registration_notify | 4 +++- dspace/config/emails/request_item.admin | 8 +++++--- dspace/config/emails/request_item.author | 8 +++++--- dspace/config/emails/submit_archive | 6 +++--- dspace/config/emails/submit_reject | 7 +++---- dspace/config/emails/submit_task | 6 +++--- dspace/config/emails/subscription | 4 ++-- dspace/config/emails/welcome | 5 ++--- 18 files changed, 49 insertions(+), 52 deletions(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 8e532310c11b..11c53cb6a7a9 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -38,6 +38,7 @@ dspace.ui.url = http://localhost:4000 # Name of the site dspace.name = DSpace at My University +dspace.shortname = DSpace # Assetstore configurations have moved to config/modules/assetstore.cfg # and config/spring/api/bitstore.xml. diff --git a/dspace/config/emails/change_password b/dspace/config/emails/change_password index eb114feeeb0c..908e494596cc 100644 --- a/dspace/config/emails/change_password +++ b/dspace/config/emails/change_password @@ -4,19 +4,15 @@ ## ## See org.dspace.core.Email for information on the format of this file. 
## -#set($subject = 'Change Password Request') +#set($subject = "${config.get('dspace.name')}: Change Password Request") #set($phone = ${config.get('mail.message.helpdesk.telephone')}) -To change the password for your DSpace account, please click the link -below: +To change the password for your ${config.get('dspace.name')} account, please click the link below: ${params[0]} -If you need assistance with your account, please email - - ${config.get("mail.helpdesk")} +If you need assistance with your account, please email ${config.get("mail.helpdesk")} #if( $phone ) - or call us at ${phone}. #end -The DSpace Team +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/doi_maintenance_error b/dspace/config/emails/doi_maintenance_error index 5424432f64ce..a86de915469b 100644 --- a/dspace/config/emails/doi_maintenance_error +++ b/dspace/config/emails/doi_maintenance_error @@ -10,9 +10,11 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = "DSpace: Error ${params[0]} DOI ${params[3]}") +#set($subject = "${config.get('dspace.name')}: Error ${params[0]} DOI ${params[3]}") Date: ${params[1]} ${params[0]} DOI ${params[4]} for ${params[2]} with ID ${params[3]} failed: ${params[5]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/export_error b/dspace/config/emails/export_error index 79468c281e3e..5223f64e3379 100644 --- a/dspace/config/emails/export_error +++ b/dspace/config/emails/export_error @@ -6,14 +6,11 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace - The item export you requested was not completed.') +#set($subject = "${config.get('dspace.name')}: The item export you requested was not completed.") The item export you requested was not completed, due to the following reason: ${params[0]} For more information you may contact your system administrator: ${params[1]} - - -The DSpace Team - +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/export_success b/dspace/config/emails/export_success index b97a3798738e..211e40dd787d 100644 --- a/dspace/config/emails/export_success +++ b/dspace/config/emails/export_success @@ -5,7 +5,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace - Item export requested is ready for download') +#set($subject = "${config.get('dspace.name')}: Item export requested is ready for download") The item export you requested from the repository is now ready for download. You may download the compressed file using the following link: @@ -13,6 +13,4 @@ ${params[0]} This file will remain available for at least ${params[1]} hours. - -The DSpace Team - +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/feedback b/dspace/config/emails/feedback index 7998367c264f..5bf83eda760c 100644 --- a/dspace/config/emails/feedback +++ b/dspace/config/emails/feedback @@ -10,7 +10,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. 
## -#set($subject = 'Feedback Form Information') +#set($subject = "${config.get('dspace.name')}: Feedback Form Information") Comments: @@ -24,3 +24,4 @@ Referring Page: ${params[3]} User Agent: ${params[4]} Session: ${params[5]} +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/flowtask_notify b/dspace/config/emails/flowtask_notify index 7e5286e3074c..f277b7f2e79f 100644 --- a/dspace/config/emails/flowtask_notify +++ b/dspace/config/emails/flowtask_notify @@ -7,7 +7,7 @@ ## {4} Task result ## {5} Workflow action taken ## -#set($subject = 'DSpace: Curation Task Report') +#set($subject = "${config.get('dspace.name')}: Curation Task Report") Title: ${params[0]} Collection: ${params[1]} @@ -20,4 +20,4 @@ ${params[4]} Action taken on the submission: ${params[5]} -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/harvesting_error b/dspace/config/emails/harvesting_error index d14b51fe8235..40e4fa58e844 100644 --- a/dspace/config/emails/harvesting_error +++ b/dspace/config/emails/harvesting_error @@ -8,7 +8,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace: Harvesting Error') +#set($subject = "${config.get('dspace.name')}: Harvesting Error") Collection ${params[0]} failed on harvest: Date: ${params[1]} @@ -18,3 +18,5 @@ ${params[3]} Exception: ${params[4]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/internal_error b/dspace/config/emails/internal_error index ee622f4b3865..266c91b116a1 100644 --- a/dspace/config/emails/internal_error +++ b/dspace/config/emails/internal_error @@ -10,7 +10,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace: Internal Server Error') +#set($subject = "${config.get('dspace.name')}: Internal Server Error") An internal server error occurred on ${params[0]}: Date: ${params[1]} diff --git a/dspace/config/emails/register b/dspace/config/emails/register index 694be449a887..87b005bc99c3 100644 --- a/dspace/config/emails/register +++ b/dspace/config/emails/register @@ -6,17 +6,13 @@ ## #set($subject = "${config.get('dspace.name')} Account Registration") #set($phone = ${config.get('mail.message.helpdesk.telephone')}) -To complete registration for a DSpace account, please click the link -below: +To complete registration for a ${config.get('dspace.name')} account, please click the link below: ${params[0]} -If you need assistance with your account, please email - - ${config.get("mail.helpdesk")} +If you need assistance with your account, please email ${config.get("mail.helpdesk")} #if( $phone ) - or call us at ${phone}. #end -The DSpace Team +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/registration_notify b/dspace/config/emails/registration_notify index 96c87fa63d9c..0627d17fe02c 100644 --- a/dspace/config/emails/registration_notify +++ b/dspace/config/emails/registration_notify @@ -8,10 +8,12 @@ ## ## See org.dspace.core.Email for information on the format of this file. 
## -#set($subject = 'DSpace: Registration Notification') +#set($subject = "${config.get('dspace.name')}: Registration Notification") A new user has registered on ${params[0]} at ${params[1]}: Name: ${params[2]} Email: ${params[3]} Date: ${params[4]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/request_item.admin b/dspace/config/emails/request_item.admin index c0443c60f8dc..ee8daa510d05 100644 --- a/dspace/config/emails/request_item.admin +++ b/dspace/config/emails/request_item.admin @@ -8,11 +8,13 @@ ## {4} the approver's email address ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'Request for Open Access') +#set($subject = "${config.get('dspace.name')}: Request for Open Access") ${params[3]}, with address ${params[4]}, requested the following document/file to be in Open Access: -Document Handle:${params[1]} +Document Handle: ${params[1]} File ID: ${params[0]} -Token:${params[2]} +Token: ${params[2]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/request_item.author b/dspace/config/emails/request_item.author index ac79270e7fbc..eb9c4f38f661 100644 --- a/dspace/config/emails/request_item.author +++ b/dspace/config/emails/request_item.author @@ -11,7 +11,7 @@ ## 8 corresponding author email ## 9 configuration property "dspace.name" ## 10 configuration property "mail.helpdesk" -#set($subject = 'Request copy of document') +#set($subject = "${config.get('dspace.name')}: Request copy of document") Dear ${params[7]}, @@ -21,10 +21,12 @@ This request came along with the following message: "${params[5]}" -To answer, click ${params[6]}. Whether you choose to grant or deny the request, we think that it''s in your best interest to respond. +To answer, click ${params[6]}. Whether you choose to grant or deny the request, we think that it's in your best interest to respond. -IF YOU ARE NOT AN AUTHOR OF THIS DOCUMENT, and only submitted the document on the author''s behalf, PLEASE REDIRECT THIS MESSAGE TO THE AUTHOR(S). Only the author(s) should answer the request to send a copy. +IF YOU ARE NOT AN AUTHOR OF THIS DOCUMENT, and only submitted the document on the author's behalf, PLEASE REDIRECT THIS MESSAGE TO THE AUTHOR(S). Only the author(s) should answer the request to send a copy. IF YOU ARE AN AUTHOR OF THE REQUESTED DOCUMENT, thank you for your cooperation! If you have any questions concerning this request, please contact ${params[10]}. + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/submit_archive b/dspace/config/emails/submit_archive index d3d62f7f4d07..ea1c31a75599 100644 --- a/dspace/config/emails/submit_archive +++ b/dspace/config/emails/submit_archive @@ -4,13 +4,13 @@ ## {1} Name of collection ## {2} handle ## -#set($subject = 'DSpace: Submission Approved and Archived') +#set($subject = "${config.get('dspace.name')}: Submission Approved and Archived") You submitted: ${params[0]} To collection: ${params[1]} -Your submission has been accepted and archived in DSpace, +Your submission has been accepted and archived in ${config.get('dspace.name')}, and it has been assigned the following identifier: ${params[2]} @@ -18,4 +18,4 @@ Please use this identifier when citing your submission. Many thanks! 
-DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/submit_reject b/dspace/config/emails/submit_reject index 44e6cf2cd9f3..f5376cb3a03b 100644 --- a/dspace/config/emails/submit_reject +++ b/dspace/config/emails/submit_reject @@ -6,7 +6,7 @@ ## {3} Reason for the rejection ## {4} Link to 'My DSpace' page ## -#set($subject = 'DSpace: Submission Rejected') +#set($subject = "${config.get('dspace.name')}: Submission Rejected") You submitted: ${params[0]} @@ -17,7 +17,6 @@ with the following explanation: ${params[3]} -Your submission has not been deleted. You can access it from your -"My DSpace" page: ${params[4]} +Your submission has not been deleted. You can access it from your "My${config.get('dspace.shortname')}" page: ${params[4]} -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/submit_task b/dspace/config/emails/submit_task index 8c8b4a7e7245..f68bac80b186 100644 --- a/dspace/config/emails/submit_task +++ b/dspace/config/emails/submit_task @@ -6,7 +6,7 @@ ## {3} Description of task ## {4} link to 'my DSpace' page ## -#set($subject = 'DSpace: You have a new task') +#set($subject = "${config.get('dspace.name')}: You have a new task") A new item has been submitted: @@ -16,9 +16,9 @@ Submitted by: ${params[2]} ${params[3]} -To claim this task, please visit your "My DSpace" +To claim this task, please visit your "My${config.get('dspace.shortname')}" page: ${params[4]} Many thanks! -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/subscription b/dspace/config/emails/subscription index 2879e579075d..5141192b572d 100644 --- a/dspace/config/emails/subscription +++ b/dspace/config/emails/subscription @@ -4,9 +4,9 @@ ## Parameters: {0} is the details of the new collections and items ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace Subscription') +#set($subject = "${config.get('dspace.name')} Subscription") New items are available in the collections you have subscribed to: ${params[0]} -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/welcome b/dspace/config/emails/welcome index febc082e072e..1c22cb05e830 100644 --- a/dspace/config/emails/welcome +++ b/dspace/config/emails/welcome @@ -3,13 +3,12 @@ ## See org.dspace.core.Email for information on the format of this file. ## #set($subject = "Welcome new registered ${config.get('dspace.name')} user!") -Thank you for registering an account. Your new account can be used immediately +Thank you for registering an account. Your new account can be used immediately to subscribe to notices of new content arriving in collections of your choice. Your new account can also be granted privileges to submit new content, or to edit and/or approve submissions. -If you need assistance with your account, please email -${config.get("mail.admin")}. +If you need assistance with your account, please email ${config.get("mail.helpdesk")}. The ${config.get('dspace.name')} Team From 9bc7edb73382a38c94a765551a0ccd4cb6a08458 Mon Sep 17 00:00:00 2001 From: Bui Thai Hai Date: Thu, 18 May 2023 13:59:12 +0700 Subject: [PATCH 234/686] Fix: Collection's admin cannot edit its template item. 
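In outline, the fix gives the template-item PATCH and DELETE endpoints their own 'ITEMTEMPLATE' permission target and adds a TemplateItemRestPermissionEvaluatorPlugin that looks up the template item's owning collection (ItemService.find(context, uuid).getTemplateItemOf()) and grants WRITE/DELETE when the current user administers that collection.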
--- .../app/rest/ItemTemplateRestController.java | 4 +- ...lateItemRestPermissionEvaluatorPlugin.java | 83 +++++++++++++++++++ 2 files changed, 85 insertions(+), 2 deletions(-) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java index e297dab44cad..a6dbf3496e49 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java @@ -120,7 +120,7 @@ public TemplateItemResource getTemplateItem(HttpServletRequest request, @PathVar * @throws SQLException * @throws AuthorizeException */ - @PreAuthorize("hasPermission(#uuid, 'ITEM', 'WRITE')") + @PreAuthorize("hasPermission(#uuid, 'ITEMTEMPLATE', 'WRITE')") @RequestMapping(method = RequestMethod.PATCH) public ResponseEntity> patch(HttpServletRequest request, @PathVariable UUID uuid, @RequestBody(required = true) JsonNode jsonNode) @@ -153,7 +153,7 @@ public ResponseEntity> patch(HttpServletRequest request, * @throws AuthorizeException * @throws IOException */ - @PreAuthorize("hasPermission(#uuid, 'ITEM', 'DELETE')") + @PreAuthorize("hasPermission(#uuid, 'ITEMTEMPLATE', 'DELETE')") @RequestMapping(method = RequestMethod.DELETE) public ResponseEntity> deleteTemplateItem(HttpServletRequest request, @PathVariable UUID uuid) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java new file mode 100644 index 000000000000..cb977dff3aef --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java @@ -0,0 +1,83 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.security; + +import java.io.Serializable; +import java.sql.SQLException; +import java.util.UUID; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.rest.model.TemplateItemRest; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.services.RequestService; +import org.dspace.services.model.Request; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.core.Authentication; +import org.springframework.stereotype.Component; + +/** + * {@link RestObjectPermissionEvaluatorPlugin} class that evaluate WRITE and DELETE permission over a TemplateItem + * + * @author Bui Thai Hai (thaihai.bui@dlcorp.com.vn) + */ +@Component +public class TemplateItemRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin { + + private static final Logger log = LoggerFactory.getLogger(TemplateItemRestPermissionEvaluatorPlugin.class); + + @Autowired + private RequestService requestService; + + @Autowired + ItemService its; + + @Autowired 
+ private AuthorizeService authorizeService; + + @Override + public boolean hasDSpacePermission(Authentication authentication, Serializable targetId, String targetType, + DSpaceRestPermission permission) { + + DSpaceRestPermission restPermission = DSpaceRestPermission.convert(permission); + if (!DSpaceRestPermission.WRITE.equals(restPermission) && + !DSpaceRestPermission.DELETE.equals(restPermission)) { + return false; + } + if (!StringUtils.equalsIgnoreCase(targetType, TemplateItemRest.NAME)) { + return false; + } + + Request request = requestService.getCurrentRequest(); + Context context = ContextUtil.obtainContext(request.getHttpServletRequest()); + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return false; + } + // Allow collection's admin to edit/delete the template + + UUID dsoId = UUID.fromString(targetId.toString()); + requestService.getCurrentRequest().getHttpServletRequest().getRequestURL(); + try { + Collection coll = its.find(context, dsoId).getTemplateItemOf(); + if (authorizeService.isAdmin(context, coll)) { + return true; + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + return false; + } +} From 8980b07f2b29b00256c3020a40c080d78e5a6450 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Tue, 23 May 2023 08:51:07 +0200 Subject: [PATCH 235/686] [DURACOM-148] fix typo: remove trailing whitespace --- .../external/crossref/CrossRefDateMetadataProcessor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java index c7d806cdf4cd..dec0b050f396 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java @@ -24,7 +24,7 @@ * This class is used for CrossRef's Live-Import to extract * issued attribute. * Beans are configured in the crossref-integration.xml file. 
- * + * * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) */ public class CrossRefDateMetadataProcessor implements JsonPathMetadataProcessor { From 50f808a7d003b0e185e790ca501546481e9c4d60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Tue, 23 May 2023 08:51:27 +0100 Subject: [PATCH 236/686] removing Person test configuration --- dspace/config/item-submission.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index f937a5fd9a78..2f20e34c6b3c 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -66,7 +66,7 @@ --> - + From 2f59554a5d6ab40dd6dbb1272e69d7a25bdeaac8 Mon Sep 17 00:00:00 2001 From: Bui Thai Hai Date: Tue, 23 May 2023 14:11:04 +0700 Subject: [PATCH 237/686] ADD: Unit Tests for fix --- .../rest/ItemTemplateRestControllerIT.java | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java index 55e82831f3d1..1fd9e81ca88d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java @@ -33,6 +33,7 @@ import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Collection; import org.dspace.core.Constants; import org.hamcrest.Matchers; @@ -243,6 +244,35 @@ public void patchTemplateItem() throws Exception { ))))); } + /* Similar to patchTemplateItem(), except it is for collection admin, not repository admin + Test case was simplified, since it does not do anything else. 
+ */ + @Test + public void patchTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(patch(getTemplateItemUrlTemplate(itemId)) + .content(patchBody) + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + + getClient(collAdminToken).perform(get(getCollectionTemplateItemUrlTemplate(childCollection.getID().toString()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + } + @Test public void patchIllegalInArchiveTemplateItem() throws Exception { setupTestTemplate(); @@ -337,6 +367,22 @@ public void deleteTemplateItem() throws Exception { .andExpect(status().isNoContent()); } + /*Similar to deleteTemplateItem(), except it is for collection admin, not repository admin + */ + @Test + public void deleteTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(delete(getTemplateItemUrlTemplate(itemId))) + .andExpect(status().isNoContent()); + } + @Test public void deleteTemplateItemNoRights() throws Exception { setupTestTemplate(); From 571df9b38a735e7ed3eaae8e9132d5c9a499e661 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Wed, 24 May 2023 15:56:03 -0400 Subject: [PATCH 238/686] Add a simple test for new TimeHelpers class. --- .../java/org/dspace/util/TimeHelpersTest.java | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java diff --git a/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java b/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java new file mode 100644 index 000000000000..12055140a2f7 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.junit.Assert.assertEquals; + +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Date; + +import org.junit.Test; + +/** + * Test {@link TimeHelpers}. + * @author Mark H. Wood + */ +public class TimeHelpersTest { + /** + * Test of toMidnightUTC method, of class TimeHelpers. 
+ */ + @Test + public void testToMidnightUTC() { + System.out.println("toMidnightUTC"); + Date from = Date.from(ZonedDateTime.of(1957, 01, 27, 04, 05, 06, 007, ZoneOffset.UTC).toInstant()); + Date expResult = Date.from(ZonedDateTime.of(1957, 01, 27, 00, 00, 00, 000, ZoneOffset.UTC).toInstant()); + Date result = TimeHelpers.toMidnightUTC(from); + assertEquals(expResult, result); + } +} From eb46a99dff6265aaf5d3e36a21cc2b8d8b3a7b6a Mon Sep 17 00:00:00 2001 From: Bui Thai Hai Date: Thu, 25 May 2023 09:57:13 +0700 Subject: [PATCH 239/686] Fix: default sort option (lastModified) for discovery --- .../DiscoverySortConfiguration.java | 14 ++++ .../discovery/utils/DiscoverQueryBuilder.java | 8 ++- .../org/dspace/discovery/DiscoveryIT.java | 68 +++++++++++++++++++ .../DiscoverConfigurationConverter.java | 9 +++ .../rest/model/SearchConfigurationRest.java | 10 +++ .../app/rest/DiscoveryRestControllerIT.java | 4 +- .../utils/RestDiscoverQueryBuilderTest.java | 15 ++++ dspace/config/spring/api/discovery.xml | 6 ++ dspace/solr/search/conf/schema.xml | 1 + 9 files changed, 132 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java index e251d1bc5118..cd1a4eecb8d4 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java @@ -9,6 +9,7 @@ import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -22,6 +23,11 @@ public class DiscoverySortConfiguration { private List sortFields = new ArrayList(); + /** + * Default sort configuration to use when needed + */ + @Nullable private DiscoverySortFieldConfiguration defaultSortField; + public List getSortFields() { return sortFields; } @@ -30,6 +36,14 @@ public void setSortFields(List sortFields) { this.sortFields = sortFields; } + public DiscoverySortFieldConfiguration getDefaultSortField() { + return defaultSortField; + } + + public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) { + this.defaultSortField = configuration; + } + public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) { if (StringUtils.isBlank(sortField)) { return null; diff --git a/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java index fa5cc3281393..92a973dff883 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java +++ b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java @@ -332,7 +332,9 @@ private boolean isConfigured(String sortBy, DiscoverySortConfiguration searchSor } private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) { - if (Objects.nonNull(searchSortConfiguration.getSortFields()) && + if (searchSortConfiguration.getDefaultSortField() != null) { + sortOrder = searchSortConfiguration.getDefaultSortField().getDefaultSortOrder().name(); + } else if (Objects.nonNull(searchSortConfiguration.getSortFields()) && !searchSortConfiguration.getSortFields().isEmpty()) { sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); } @@ -342,7 +344,9 @@ private 
String getDefaultSortDirection(DiscoverySortConfiguration searchSortConf private String getDefaultSortField(DiscoverySortConfiguration searchSortConfiguration) { String sortBy;// Attempt to find the default one, if none found we use SCORE sortBy = "score"; - if (Objects.nonNull(searchSortConfiguration.getSortFields()) && + if (searchSortConfiguration.getDefaultSortField() != null) { + sortBy = searchSortConfiguration.getDefaultSortField().getMetadataField(); + } else if (Objects.nonNull(searchSortConfiguration.getSortFields()) && !searchSortConfiguration.getSortFields().isEmpty()) { DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0); if (StringUtils.isBlank(defaultSort.getMetadataField())) { diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 0d1cc13106a8..0c3a52ec79f5 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -8,13 +8,16 @@ package org.dspace.discovery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; +import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import org.dspace.AbstractIntegrationTestWithDatabase; @@ -24,6 +27,7 @@ import org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.PoolTaskBuilder; import org.dspace.builder.WorkflowItemBuilder; @@ -39,6 +43,8 @@ import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.discovery.configuration.DiscoveryConfiguration; +import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableItem; @@ -731,6 +737,68 @@ public void iteratorSearchServiceTest() throws SearchServiceException { } } + /** + * Test designed to check if default sort option for Discovery is working, using workspace + * DiscoveryConfiguration
+ * Note: this test will be skipped if workspace do not have a default sort option set and of + * metadataType dc_date_accessioned or lastModified + * @throws SearchServiceException + */ + @Test + public void searchWithDefaultSortServiceTest() throws SearchServiceException { + + DiscoveryConfiguration workspaceConf = SearchUtils.getDiscoveryConfiguration("workspace", null); + // Skip if no default sort option set for workspaceConf + if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) { + return; + } + + DiscoverySortFieldConfiguration defaultSortField = + workspaceConf.getSearchSortConfiguration().getDefaultSortField(); + + // Populate the testing objects: create items in eperson's workspace and perform search in it + int numberItems = 10; + context.turnOffAuthorisationSystem(); + EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build(); + context.setCurrentUser(submitter); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + for (int i = 0; i < numberItems; i++) { + ItemBuilder.createItem(context, collection) + .withTitle("item " + i) + .build(); + } + + // Build query with default parameters (except for workspaceConf) + DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder() + .buildQuery(context, new IndexableCollection(collection), workspaceConf,"",null,"Item",null,null, + null,null); + + DiscoverResult result = searchService.search(context, discoverQuery); + + if (defaultSortField.getMetadataField().equals("dc_date_accessioned")) { + // Verify that search results are sort by dc_date_accessioned + LinkedList dc_date_accesioneds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getMetadata()) + .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) + .map(m -> m.getValue()).findFirst().orElse("") + ) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(dc_date_accesioneds.isEmpty()); + for (int i = 1; i < dc_date_accesioneds.size() - 1; i++) { + assertTrue(dc_date_accesioneds.get(i).compareTo(dc_date_accesioneds.get(i + 1)) >= 0); + } + } else if (defaultSortField.getMetadataField().equals("lastModified")) { + LinkedList lastModifieds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(lastModifieds.isEmpty()); + for (int i = 1; i < lastModifieds.size() - 1; i++) { + assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); + } + } + } + private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { assertSearchQuery(resourceType, size, size, 0, -1); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java index 73851bd94523..41cf235a878b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java @@ -80,6 +80,15 @@ private void addSortOptions(SearchConfigurationRest searchConfigurationRest, sortOption.setSortOrder(discoverySearchSortConfiguration.getDefaultSortOrder().name()); 
searchConfigurationRest.addSortOption(sortOption); } + + DiscoverySortFieldConfiguration defaultSortField = searchSortConfiguration.getDefaultSortField(); + if (defaultSortField != null) { + SearchConfigurationRest.SortOption sortOption = new SearchConfigurationRest.SortOption(); + sortOption.setName(defaultSortField.getMetadataField()); + sortOption.setActualName(defaultSortField.getType()); + sortOption.setSortOrder(defaultSortField.getDefaultSortOrder().name()); + searchConfigurationRest.setDefaultSortOption(sortOption); + } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java index 7ec1b2250092..b25d827e75c1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java @@ -31,6 +31,8 @@ public class SearchConfigurationRest extends BaseObjectRest { private List filters = new LinkedList<>(); private List sortOptions = new LinkedList<>(); + private SortOption defaultSortOption; + public String getCategory() { return CATEGORY; } @@ -75,6 +77,14 @@ public List getSortOptions() { return sortOptions; } + public SortOption getDefaultSortOption() { + return defaultSortOption; + } + + public void setDefaultSortOption(SortOption defaultSortOption) { + this.defaultSortOption = defaultSortOption; + } + @Override public boolean equals(Object object) { return (object instanceof SearchConfigurationRest && diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java index a115c8aa2f15..dd0b2fe576f2 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java @@ -1286,8 +1286,10 @@ public void discoverSearchByFieldNotConfiguredTest() throws Exception { context.restoreAuthSystemState(); + //Update this test since dc.date.accessioned is now configured for workspace configuration, + // which will return status 400 instead of 422 if left unchanged getClient().perform(get("/api/discover/search/objects") - .param("sort", "dc.date.accessioned, ASC") + .param("sort", "person.familyName, ASC") .param("configuration", "workspace")) .andExpect(status().isUnprocessableEntity()); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java index 6c9544d2f927..511bb8f98b7b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java @@ -115,6 +115,8 @@ public void setUp() throws Exception { sortConfiguration.setSortFields(listSortField); + sortConfiguration.setDefaultSortField(defaultSort); + discoveryConfiguration.setSearchSortConfiguration(sortConfiguration); DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet(); @@ -167,6 +169,19 @@ public void testSortByScore() throws Exception { page.getOffset(), "SCORE", "ASC"); } + @Test + public void testSortByDefaultSortField() throws Exception { + page = PageRequest.of(2, 10, Sort.Direction.DESC, "dc.date.accessioned"); + 
restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page); + + verify(discoverQueryBuilder, times(1)) + .buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(), + page.getPageSize(), page.getOffset(), + discoveryConfiguration.getSearchSortConfiguration().getDefaultSortField().getMetadataField(), + discoveryConfiguration.getSearchSortConfiguration().getDefaultSortField() + .getDefaultSortOrder().name().toUpperCase()); + } + @Test(expected = DSpaceBadRequestException.class) public void testCatchIllegalArgumentException() throws Exception { when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), anyList(), any(), any(), any(), diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 611e77b27b1c..3f0f507451d3 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -865,8 +865,11 @@ + + + @@ -938,6 +941,8 @@ + + @@ -1015,6 +1020,7 @@ + diff --git a/dspace/solr/search/conf/schema.xml b/dspace/solr/search/conf/schema.xml index caa646ba1b8b..df21afbc6426 100644 --- a/dspace/solr/search/conf/schema.xml +++ b/dspace/solr/search/conf/schema.xml @@ -283,6 +283,7 @@ + From b3a21ebd5a81701899753d83511746ab90f36cce Mon Sep 17 00:00:00 2001 From: Bui Thai Hai Date: Thu, 25 May 2023 14:54:46 +0700 Subject: [PATCH 240/686] Minor Tweaks --- .../java/org/dspace/app/rest/DiscoveryRestControllerIT.java | 4 +--- dspace/config/spring/api/discovery.xml | 4 ++-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java index dd0b2fe576f2..a115c8aa2f15 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java @@ -1286,10 +1286,8 @@ public void discoverSearchByFieldNotConfiguredTest() throws Exception { context.restoreAuthSystemState(); - //Update this test since dc.date.accessioned is now configured for workspace configuration, - // which will return status 400 instead of 422 if left unchanged getClient().perform(get("/api/discover/search/objects") - .param("sort", "person.familyName, ASC") + .param("sort", "dc.date.accessioned, ASC") .param("configuration", "workspace")) .andExpect(status().isUnprocessableEntity()); } diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 3f0f507451d3..45e5829e1a3a 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -866,10 +866,10 @@ - + - + From f3b939e88f63fd83f56dd1ea7f29781ce398d4fe Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Thu, 25 May 2023 15:34:05 +0300 Subject: [PATCH 241/686] 94299: Add rest.patch.operations.limit to config file --- .../dspace/app/rest/repository/BitstreamRestRepository.java | 2 +- .../java/org/dspace/app/rest/BitstreamRestRepositoryIT.java | 4 ++-- dspace/config/modules/rest.cfg | 4 ++++ 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index 454b6f8453d6..12e27dccacf2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -265,7 +265,7 @@ public BundleRest performBitstreamMove(Context context, Bitstream bitstream, Bun * @param jsonNode the json body provided from the request body */ public void patchBitstreamsInBulk(Context context, JsonNode jsonNode) throws SQLException { - int operationsLimit = configurationService.getIntProperty("patch.operations.limit", 1000); + int operationsLimit = configurationService.getIntProperty("rest.patch.operations.limit", 1000); ObjectMapper mapper = new ObjectMapper(); JsonPatchConverter patchConverter = new JsonPatchConverter(mapper); Patch patch = patchConverter.convert(jsonNode); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index 2a1044c28a04..8b34edb938a6 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -2565,7 +2565,7 @@ public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception { context.restoreAuthSystemState(); // Add three out of four bitstreams to the list of bitstreams to be deleted - // But set the patch.operations.limit property to 2, so that the request is invalid + // But set the rest.patch.operations.limit property to 2, so that the request is invalid List ops = new ArrayList<>(); RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); ops.add(removeOp1); @@ -2577,7 +2577,7 @@ public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception { String token = getAuthToken(admin.getEmail(), password); Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); - DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("patch.operations.limit", 2); + DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("rest.patch.operations.limit", 2); getClient(token).perform(patch("/api/core/bitstreams") .content(patchBody) diff --git a/dspace/config/modules/rest.cfg b/dspace/config/modules/rest.cfg index 6421258c575b..657e02b58de7 100644 --- a/dspace/config/modules/rest.cfg +++ b/dspace/config/modules/rest.cfg @@ -25,6 +25,10 @@ rest.projections.full.max = 2 # This property determines the max embed depth for a SpecificLevelProjection rest.projection.specificLevel.maxEmbed = 5 +# This property determines the max amount of rest operations that can be performed at the same time, for example when +# batch removing bitstreams. The default value is set to 1000. +rest.patch.operations.limit = 1000 + # Define which configuration properties are exposed through the http:///api/config/properties/ # rest endpoint. If a rest request is made for a property which exists, but isn't listed here, the server will # respond that the property wasn't found. 
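To make the new rest.patch.operations.limit property concrete: it is read by patchBitstreamsInBulk above to cap the number of operations in a bulk PATCH, which is what the invalid-request-size test relies on. A minimal sketch of assembling such a request and applying the same check is given below; the "/bitstreams/{uuid}" path shape mirrors (by assumption) the OPERATION_PATH_BITSTREAM_REMOVE constant used in the test, and the package names of the patch model classes are assumptions rather than something shown in this diff.

import java.util.ArrayList;
import java.util.List;

import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.RemoveOperation;
import org.dspace.services.factory.DSpaceServicesFactory;

public class BulkRemoveLimitSketch {
    // Build one JSON Patch "remove" operation per bitstream and refuse the request
    // up front when it is larger than rest.patch.operations.limit allows.
    public static void checkAgainstLimit(List<String> bitstreamUuids) {
        List<Operation> ops = new ArrayList<>();
        for (String uuid : bitstreamUuids) {
            ops.add(new RemoveOperation("/bitstreams/" + uuid));
        }
        int limit = DSpaceServicesFactory.getInstance().getConfigurationService()
                .getIntProperty("rest.patch.operations.limit", 1000);
        if (ops.size() > limit) {
            throw new DSpaceBadRequestException(
                    "The number of operations (" + ops.size() + ") exceeds the limit of " + limit);
        }
    }
}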
This property can be defined multiple times to allow access to multiple From 5025fe83f42a27f7072ae72f93f90ca08676cedf Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Fri, 26 May 2023 17:03:29 +0200 Subject: [PATCH 242/686] [DSC-1105] Align imp framework sequences to pre existing Ids --- ..._DBMSImportFramework2_with_current_ids.sql | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.4_2023.05.26__align_sequences_DBMSImportFramework2_with_current_ids.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.4_2023.05.26__align_sequences_DBMSImportFramework2_with_current_ids.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.4_2023.05.26__align_sequences_DBMSImportFramework2_with_current_ids.sql new file mode 100644 index 000000000000..9d5cb69e43fb --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.4_2023.05.26__align_sequences_DBMSImportFramework2_with_current_ids.sql @@ -0,0 +1,30 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create sequences for DBMS Import framework +----------------------------------------------------------------------------------- +do $$ +begin + + SELECT pg_catalog.setval('imp_record_seq', (SELECT coalesce(MAX(imp_id),0) FROM imp_record)+1); + + SELECT pg_catalog.setval('imp_metadatavalue_seq', (SELECT coalesce(MAX(imp_metadatavalue_id),0) FROM imp_metadatavalue)+1); + + SELECT pg_catalog.setval('imp_bitstream_seq', (SELECT coalesce(MAX(imp_bitstream_id),0) FROM imp_bitstream)+1); + + SELECT pg_catalog.setval('imp_bitstream_metadatavalue_seq', (SELECT coalesce(MAX(imp_bitstream_metadatavalue_id),0) FROM imp_bitstream_metadatavalue)+1); + +exception when others then + + raise notice 'The transaction is in an uncommittable state. 
' + 'Transaction was rolled back'; + + raise notice 'Rollback --> % %', SQLERRM, SQLSTATE; +end; +$$ language 'plpgsql'; \ No newline at end of file From 4fa51d03d11259cd67506247be406d5b7faa7e35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Fri, 26 May 2023 17:14:10 +0100 Subject: [PATCH 243/686] adding support for access status xoai plugin --- .../AccessStatusElementItemCompilePlugin.java | 67 ++++++++++++++++ .../oai/metadataFormats/oai_openaire.xsl | 78 +++++++++++++++++-- dspace/config/spring/oai/oai.xml | 4 + 3 files changed, 144 insertions(+), 5 deletions(-) create mode 100644 dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java new file mode 100644 index 000000000000..65ec251b21ce --- /dev/null +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xoai.app.plugins; + +import java.sql.SQLException; +import java.util.List; +import org.dspace.access.status.factory.AccessStatusServiceFactory; +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.xoai.app.XOAIExtensionItemCompilePlugin; +import org.dspace.xoai.util.ItemUtils; +import com.lyncode.xoai.dataprovider.xml.xoai.Element; +import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; + +/** + * AccessStatusElementItemCompilePlugin aims to add structured information about the + * Access Status of the item (if any). + + * The xoai document will be enriched with a structure like that + *

+ * <pre>
+ * {@code
+ *   <element name="others">
+ *       <element name="access-status">
+ *          <field name="value">open.access</field>
+ *       </element>
+ *   </element>
+ * }
+ * </pre>
+ * Returning Values are based on: + * @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper + */ +public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin { + + @Override + public Metadata additionalMetadata(Context context, Metadata metadata, Item item) { + AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance().getAccessStatusService(); + + try { + String accessStatusType; + accessStatusType = accessStatusService.getAccessStatus(context, item); + + Element accessStatus = ItemUtils.create("access-status"); + accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType)); + + Element others; + List elements = metadata.getElement(); + if (ItemUtils.getElement(elements, "others") != null) { + others = ItemUtils.getElement(elements, "others"); + } else { + others = ItemUtils.create("others"); + } + others.getElement().add(accessStatus); + + } catch (SQLException e) { + e.printStackTrace(); + } + + return metadata; + } + +} diff --git a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl index 7b66eaf04372..19b1486f4cbd 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl @@ -93,6 +93,9 @@ + + @@ -658,6 +661,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1207,7 +1244,7 @@ - + + + + + + + + + + + open access + + + embargoed access + + + restricted access + + + metadata only access + + + + + + + + From e889abc6238b257bbde537814e011a842f4512d1 Mon Sep 17 00:00:00 2001 From: nwoodward Date: Fri, 26 May 2023 14:21:57 -0500 Subject: [PATCH 244/686] check that zip file exists and has correct MIME type; also make sure that common temp imports directory is not removed --- .../org/dspace/app/itemimport/ItemImport.java | 49 ++++++++++++++++-- .../dspace/app/itemimport/ItemImportCLI.java | 23 +++++++- .../app/itemimport/ItemImportCLIIT.java | 24 +++++++++ .../org/dspace/app/itemimport/test.pdf | Bin 0 -> 56812 bytes .../dspace/app/itemimport/ItemImportIT.java | 6 +++ 5 files changed, 95 insertions(+), 7 deletions(-) create mode 100644 dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index ac9db7605103..bcf7afed38b5 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -23,6 +23,7 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.tika.Tika; import org.dspace.app.itemimport.factory.ItemImportServiceFactory; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.authorize.AuthorizeException; @@ -77,6 +78,7 @@ public class ItemImport extends DSpaceRunnable { protected boolean zip = false; protected boolean remoteUrl = false; protected String zipfilename = null; + protected boolean zipvalid= false; protected boolean help = false; protected File workDir = null; protected File workFile = null; @@ -235,11 +237,19 @@ public void internalRun() throws Exception { handler.logInfo("***End of Test Run***"); } } finally { - // clean work dir if (zip) { - FileUtils.deleteDirectory(new File(sourcedir)); - FileUtils.deleteDirectory(workDir); - if 
(remoteUrl && workFile != null && workFile.exists()) { + // if zip file was valid then clean sourcedir + if (zipvalid && sourcedir != null && new File(sourcedir).exists()) { + FileUtils.deleteDirectory(new File(sourcedir)); + } + + // clean workdir + if (workDir != null && workDir.exists()) { + FileUtils.deleteDirectory(workDir); + } + + // conditionally clean workFile if import was done in the UI or via a URL and it still exists + if (workFile != null && workFile.exists()) { workFile.delete(); } } @@ -329,7 +339,14 @@ protected void readZip(Context context, ItemImportService itemImportService) thr // manage zip via remote url optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); } + if (optionalFileStream.isPresent()) { + // validate zip file + Optional validationFileStream = handler.getFileStream(context, zipfilename); + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); @@ -337,10 +354,32 @@ protected void readZip(Context context, ItemImportService itemImportService) thr throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } + /** + * Confirm that the zip file has the correct MIME type + * @param inputStream + */ + protected void validateZip(InputStream inputStream) { + Tika tika = new Tika(); + try { + String mimeType = tika.detect(inputStream); + if (mimeType.equals("application/zip")) { + zipvalid = true; + } else { + handler.logError("A valid zip file must be supplied. 
The provided file has mimetype: " + mimeType); + throw new UnsupportedOperationException("A valid zip file must be supplied"); + } + } catch (IOException e) { + throw new IllegalArgumentException( + "There was an error while reading the zip file: " + zipfilename); + } + } + /** * Read the mapfile * @param context diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java index 1a71a8c4c09e..98d2469b7155 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -8,6 +8,7 @@ package org.dspace.app.itemimport; import java.io.File; +import java.io.FileInputStream; import java.io.InputStream; import java.net.URL; import java.sql.SQLException; @@ -101,6 +102,17 @@ protected void readZip(Context context, ItemImportService itemImportService) thr // If this is a zip archive, unzip it first if (zip) { if (!remoteUrl) { + // confirm zip file exists + File myZipFile = new File(sourcedir + File.separator + zipfilename); + if ((!myZipFile.exists()) || (!myZipFile.isFile())) { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + + // validate zip file + InputStream validationFileStream = new FileInputStream(myZipFile); + validateZip(validationFileStream); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + File.separator + context.getCurrentUser().getID()); sourcedir = itemImportService.unzip( @@ -109,15 +121,22 @@ protected void readZip(Context context, ItemImportService itemImportService) thr // manage zip via remote url Optional optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); if (optionalFileStream.isPresent()) { + // validate zip file via url + Optional validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } else { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); - sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } } } diff --git a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java index 411e8de4dfed..02a0a8aee048 100644 --- a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java +++ b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java @@ -8,6 +8,7 @@ package org.dspace.app.itemimport; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.File; import java.nio.file.Files; @@ -33,6 +34,7 @@ import org.dspace.content.service.RelationshipService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import 
org.flywaydb.core.internal.util.ExceptionUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -46,6 +48,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private static final String ZIP_NAME = "saf.zip"; + private static final String PDF_NAME = "test.pdf"; private static final String publicationTitle = "A Tale of Two Cities"; private static final String personTitle = "Person Test"; @@ -55,6 +58,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private Collection collection; private Path tempDir; private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; @Before @Override @@ -226,6 +230,10 @@ public void importItemByZipSafWithBitstreams() throws Exception { checkMetadata(); checkMetadataWithAnotherSchema(); checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); } @Test @@ -254,6 +262,22 @@ public void importItemByZipSafWithRelationships() throws Exception { checkRelationship(); } + @Test + public void importItemByZipSafInvalidMimetype() throws Exception { + // use sample PDF file + Files.copy(getClass().getResourceAsStream("test.pdf"), + Path.of(tempDir.toString() + "/" + PDF_NAME)); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString() + "/mapfile.out" }; + try { + perfomImportScript(args); + } catch (Exception e) { + // should throw an exception due to invalid mimetype + assertEquals(UnsupportedOperationException.class, ExceptionUtils.getRootCause(e).getClass()); + } + } + @Test public void resumeImportItemBySafWithMetadataOnly() throws Exception { // create simple SAF diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf b/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf new file mode 100644 index 0000000000000000000000000000000000000000..5b3749cbff73a41baf8aa06b7f62339886bc6ca2 GIT binary patch literal 56812 zcmdSAWmH|=wk=2?K!Ow8-GXo2-QC^UIBXn(TX1&`1b26LcMmSX-5rYL-0$9V+Iz36 ze$=0$HQJtQtuf~4W6ZH;?`sR0ys#)OBOMz&3|Za#A3O{rAp;@E&;lNYo10$wn=O!D z$iUIS3S>$zZ(s^^AOyT$R-~6UurZ|s+R&;fQ4!KBIodlJIV#%&fr=oIBOxQpUlSPv zdrJu$6VUrKe}+I~!gn(uFE70e(9yv7eNI9qj=zSg=0F#qy}Uip1ZWSmF?x@|%<$&| zFE2a{(8l;LmOoMd1JqwI^gNk{)lp2z(ZCV-eqBt7jgak6u=f!=A^V@v zyM??x$Vdt3s73!?QF>*dt0TRH^?S(#|MepDuNR3w_})ukY;GV3a@BfwWgui_1Q4=u z0CWi7^RaP!NA_NUf6ZUFwoiD2&gC~ z_&%?)fuX~@$-frlUs*a3GW>f%{!IK&K}5~19D(-qqE_!&2?LEl#z1;$ppB`c86hKp zmHCgCgQGprz#1OLHN8x2J!**^$xCd@JK3Ei;gj<-Nr$w zOT0dWYUJgH&4{99vU3X9BV>Dg;lZ>GGQq|4^joM9BcM35@rf9)^fYYJpw@aVCcpM0bz1drdfU8G6jA=#C+*NKP(v>OP83QJ{k^e zO036Hyh0Jb^606Q8xp4L6@%Pc67}EIp?wI!U3Bqyzxs0q7ars7_Skbdx1y1f7}Q+? 
z#*OrbuHf1WHj*5*T^2#LW#o5Kq^gGc0w~{e46E`?02KY3t=Pt7)06l|fU4h)&@Utz zLWx>Mk>iS=N}&(q`L6ub>op3R>{vVG2F+5Rq1&x-UpTiDdKUu=q&ZwF4aA_sZWujy z-DcsNLz@U1rub0FMAI$Cm#U81mrALc40KB?0jlNTi_&@p{pjFYP3@p+EPQ2&IlDA1 z6Ex)y1REelpHT4w+)#q+H+4g`p^m|RyitkzMj=mcG%eJ7_;T*US0U?&4^o=yv8V!X zgO^_7XZ-6**B$*rn$bgOdR1I@8J6JzIr z$Le;hD7c7U@+BbR1v5JDSFH=3S?NOE<7blE5~&s&L)HnmR>4kEyK>$(5$|PW>pN z=W0gx+bwskC!`zg$%6OwRhmK$UI-7d<%T=glf_grj@ zu8T{UO0H>RNiE<}W<~#_1`k$O2syLDmK~@=7ww^ddIzo2|KxDZQGx zjew1V`F|h(u@?q97}=ZKI)dy8S^u(tsJXp^qmY?_Js}g|op}Fi%m@IyyBj;2IS?}a z7X$qdUH)@fL&X1)>whyf zy`T)eEXdy4z>40;fRORuY4A_Q{vVx$1pd?Mj}-V1r~hn#_by=gf3@lo#_y#xcl<^x z{$I5;Hn4I0GnWG)<6q@NuMASLG5h$iGAXYw{O{79rywvGy)`{%7U?p7{sl z|4j@2)3!nq!ha(F>0H3SbjIJL-cKuo#3Y1e3~cHDflDu}_Kxp;?cOgceRFtUsXy}W zT`T;-@OQ!fr^5&t|B5ksHA1GpLg_ye_;)cUXKxI&e_wpcf1nC2K)~(^Eb#p$^Q2(`9CQBg8Kh+pZ`tZU8Vs6giL=oga4@GziT;V zb4M#6r2yd{-AVX<2iO}JIhuoP-j5v|357vMPJgRs^1i7EE7=+t{i&*elcO2Pp7M`4 zCsYH0jNeZQz01A#jfe8}4NL~i9LxyJ9t;HL@cwTC<_J~}MhK?%KDGxlejl0q9TEO- z>W+ng&fR=%Uf`NhJ-Ao2#{QqF_F0GB7 zjDY`N0sHqV0{*=l|Mrlap@ov8HN7k$BM1H8z4yJDncheLC$Y@T0{AoGKXmb~m6c^I zxB21PFdhjUSAOh{Y$H;B2hL2(V+-Og3N7|9Pm$->7qFw4y?N$^FE>k39s%jUP1pPo zC5gx;6Q6`C1<&w_#jb?}!tbNjO7oivt50%ZN=Sd`H!&6@4pE3!H)Jj%Oa5q8+=VHQ z2K-7m20Q$*DKSftk{tvknFpgRo2+CHVEL}C8_GdVN+V3QYxr;(kPWwLfLCs5)w>Gw zn;z+k7BjM`nUn7cHUyh~YMacYl@yD>L&p44xB`qOeY-r}WG@L-a2B3{CHGPtPHhy(ir( z8WpyhS3TIJU_8$!vU{D78unQiMbcP6x3y&@=pz~1ZYlR`iclC|*KfTriq|82YBFCc za{`DIjYdNy3mXwUS97LwA6K-~p91lZdzDlZwS_QXcp6UgPJAgSwH7#g$CxYpcj>o{ zoI4@6!bb}QdiFIT+v`mAMX+Uu!i?C-AWA@6vC0Lqo^|*&SJ4^y#Iz*C071Fn0O4BX zfYZ+sf@nW^oSWvgis?-*Y4dk-6F4l`aHbVQtVMrcut5&NNJt?3M&RQD5Z?*o2e0Vj z$;;V2`NuXN-xhiXdkLq$)Zh)EHZ&dc>$DfH+0>kIZTl(Lx z!M`h;|MMC!voXAjiGMxJGQN`*fSvU(as1Ccdh7}75#Kj=my%)%l3;D|U19qr#hM12 z799;6T^UQE=IaxXW{fN?*r-Q}$-jvfqVmxn=If1a*H_8D`7u#P0qUuR7L}^91<0&ajrsJbbHJ^Pkk^4v#%tm^tM98Q{ z^0j4q(S4Se+H1>lj)a}i6s+-)y>mV1U`%&$U&$3lyoirft~ht;bq*V3h>H2dQMWTY zjBi@$5v8Y4oTSN?Zl3*`tQ{gpIh;Q)J>s%^e8BXlAgvI1`bB9yo zJcmK2F~8<~HdnEOJTmBWKb~iuC}Ux4T%0T#Lm45n5a#YuFHTs6woBKX#7G(4w!a^)r)5 z++RSa$VohoFVRK}sHQj0yZyBL5=t(k2VU4Urzba4=4jQ|&{3=FDhU9)s39|*{J12n zI!qEdAo*2CCie;T#a{e7rKy&{E@KMxKZfo(kqmzg>BV(!f_2kQ4T#AJVpw?qU(QuyRc#4{YmOT1f;$EZ^Z z7=>U#)NG{hl+SOUtxR3d_?83DU^$mNqRReWIG^|^y4fb0m5}{p^4Cn5=)XPw*Ws&O z7PrJzdlzafjGHm<-FS%9p!F*gGA>L%!Mo$?<{kOFY}{hfN=_n*jqFYe_|hEZZfqcpJ(Xa8glFD?wkHp7+4GXGt` zQkwz4O>)2NHfL_1&k@Gm!Ja?ZvEjpsd}2|YQ^ouvpTFgVtCk8E!TUTz&i!6@TkGZ6 zb)!ZN;dQ?zgW!kI^@4ZiR>y~j0VLIF-b1DefSzCI<4gk9UJM_(9XUTG>MBg%bwrmg&Q%~ZL#MIbyc2{}7xi6r| zun0yhc()8<_8dPq7oifS=oxHz`xT>p)ZywD`0#or%(4#@>hPvVAJfZHQ?IOK&{D5n zC6yw~iVK)i<_<{o418WDgxHQRkX=FBM zjbrJTjX(1D6hp4wlsTD|Q?TaG>u|BZ?i*0$q$F-q-6Zg&hnrxc ziMuS!K~JwZP5b$r&o+#GW|pxiuc&F#e(m-HE3tN&K!|Q3*T1m%s$AlPGPwW+r^z<{ z-JZF2=$F#uf~WPMWQ|svox_U5k_jucaGKt2%!WvZlW#mrkfI#M)P2^{F1)Fvb#@D{ z(@!PzmKHX4h-C_vOe&|L*)uVm52xm4CpP>83*Fx%l+yRZD(={z7u_894?e^JIU#; zrsZ?aj3=U^U#p`K+nj_nz!`6inwsqXh&AVs(b>hO$h@SELvI=&#swdDrl2u*Hj*X* z0$fO)8lnQQJ169+8KAGK81##+PwG|uKnXVxoKbTp$FHFbZ>K$(nPcWS8~Z%-72+nq z_;eWNLAhNBU6B4D5*A^g<|w5VZdt+zEUfBx*UnV~C%;b5*^le>57c2B9!XNNXZg{R zH42{`Wh^4&GnkjrN$UoW5YJVq4!Ec!Qpw45D98$m8=8>^5G}O?6g!h04vnhF5zF=D zvGj-DB3x~R*Vy}3+08|3YT8?zIvXOCCccXNCXpM0vZo>T>46A^{(*fYx-au%iU#X` zpBw)4@q3w3<*dGuzO8jlrJXnFfVH3{%mvq#@<{b0F2%AUw;S#2UI3iv$I5<#@j#zC zCu!1~fky518bwME;rZ#o0}BWv7-hE!x+u;PkHl|wtKd-79IbYT;isp`HOBCZrOhok zE8~Hu^QZOljh6Mx=1wkW$Ya?}EjjHszpsP3-PwMD!!JBC21AxsS9a2pt*$z_iM!^r zLMv+_&HzX(*d8EI+Pqx!>laS!*(O9NcoIv8ZJSNIh5ZV@A7Y5-st73}O_Wuo6o;lc84p|DP>@)V|K6Hl-BHkRBa zW?hhLfN6wde2!D@mhdFLBJr1-+%m+-Q<&(O8dj* 
zspk|9&2;-Vk%wUXYA~8}FmEDFR7U6hSt%@!k&X_r@|JGRXl3vxkNxt0J7sKj*%vvB z3hmPa&dm$N7_3Dd&dda#G+`pJKihW%Ae)wdw04gv;a`?oz5q{y{+eW4f>W}@Qy!GL zH%B>RcE|4Wf@>z;i<6D3+LZ3_tT;lOBQ$>grW%s*lk|?K?84@#Fpi8>UhqTaY3FlI z7_2NB4(ZnSLo93!!eCQcRvllF`pz#Mckz;1S(f!RI0D*KBu$DX9_78NG{+7QIw#il z5k^xDEnS$Ui$9Ba0FWaVR^{!5=3-f=cA5z)I+?A3eKU;lm-o@ug=U0oXnLD%U zGc<@xt&-39fLG{%zjkxV+QgqqylvjC3pqW{1L^b`WwQYiZn0{&Zp>+92QL~0=fcYv zgpH9!HTI#}$)%+Q0*>4bJ1czG0sJw&Dt2RL)2U%BVIcb3B`SdJGBBVi)y3Gd-mPH- z5$$x!UgM)@GT9EBw4}Bvnq{#!Q8Kn+5G0Qz%S_x zs)^5Ll-XvN;v%yPyd1uHYd2#KK%* ze*nk9)e2sac7l^Z^Zw13Cv6|h7u>_X&egk_#XE!iCG#$nD6;sF)Gk=Fub@`OLmb)% zMx(2WSPk-+pBHZS`%~{z+szG`#iv;=<(#h8=U5&^BjopG!6LAY8kR?L3(jS1aULglir+yX zf=7X&X_B87ICYM72E8MV=JXNi42dOi4$-M}fIifUM*ooXfeix9_4dO##)$GUYM3po zU9e@JswDU-sm=6$Th>7qAJjU68{2TK4G!h{hp#&gi02?74y^0>4{osctdnU9iPu38 zJ1T~Gs-QlwRXR?8=b*YJgeDYqIMhLF5gTgl1{K}fXn9j$Eq6mAYA_vFk%NYCfxI?U#z>X}mLr|NTBr!c<57HrVyq}voObC zHXpuXTEpJU+7VSkdYStwU<@x4C<+e2tVBHv$m2<(?`ve4epmo6htwAQN+#2(sz2bP z>@8_u9XZQKOxCF_zTFiuK#54qL+sFLN137k$T1F__wvF;*NoxxmEr$+fdIaP0XrZ! z6bk*jjK7VXFZ&=pzNngxuN?jN?M@9p@A8j{`rk}*YAQO}CPY}wW|NO&+XUDhpy!h9 zgv5)H{t6s=(SUBxHUW@bh-eLW>?uNiUQ&3aa?}=LS2cR>vJbS6AFgsd)!c4S$rwt>*u$JjZK5C@;EI%XZY16_KX1?6+&6U} z;C%$}J^;K80bgTAT33L)17t@)eU0RA*@^wb@vEhI+qKMM5l+dOPRRvaqa?Vy7ECQ+ zKV6Ea6k8IPjECD@7Zpv3W{i$ui-n|U_D5VcBFd4`5gv;L9{t-L1Y|#Yy7)em?Y9hF zus2RQPZ^h=jA;GQvp1MJhujiqnE`2PY84VUv7Wnd7lki|qc518Y&TuCub6 zVcE9bAl4xM=d8S$Ej9oiD^PfwFlP=E6wcI-86n!Hz-Ui`g(9@+U##XOmsHpwnXP7A zZ1#;hi7Lt?Vjv~P=;k9-nCKdNULYhCi{kcdYUd}A(ICkubI{h{-Erg7KRHwzChrHqpcvJo{Iz7+-fq>POdhbt^a@vX&Q z>ntYs1409v&%`yM70=AYZ+?6(N@4L$!zo~f9wBfgtm!A9oVwg(xQy_1*uju+zqfBM zZMQ)@i|$=-4Jz9lIiir_)l%By%M;aTg|PS?CtQ4^n5djXGhb2^u`r8Np>gBHqoIME zSFosF6u|B#sgz1!PK~3JRYoq6)qfISR+lh#q6qJcY(8plVr17#qiK}LK8-yREy;tA zs0_ocm}eRQ4a4&TIn{N+ylNSRaX=2gi~#Y<@`b@!lY&0mUmtUFQ*__;Ery|C_e|cT z&H8NIJV|e_uK8#WTni#il~z1KjmLB%-37XRZ^S%fwVhv4+j-4tq6~J_Zh^ynX{keK zg|5%~v>D%`B6DeFrhL2%w!X;v=Cq#E`^@vm0%zjzC{@XGuKX#oe}1Un0;qS#;uX}W z5PBBp$uiuI{eE0@XYR=&TW$~`GsYuYe6cVdKB@|dOJ_kKJ&&d~e^`Xzi$u+E{Zzgu3T1aD8 zcUL2^!72za#WTgk2--r(8`jw=8ZKG-iP{v#E%Hr-3pHqY)9yEiGbJ_4`2uUnyl1&CC8KboUii>;%v)S2Hcwbnoc9^~npT#*!mwB`wFdL@O4wLTP9 z%#7-M4vnHISEquW{(MxeC4-kw7gw2@o^KduVF5Tu3~@57K|)X*xao6KSQ>pGTFmx|+`c8{ct1T&)2Mdy^g5FSSvxqP z*KK>yyrsGk!9z9s=5wVw@^ZKm3FPY2s;q1e$WF#f(_nRWvqg=#i-KMzw z(Ac%D%h(vA$@*`|oIufl`Am$T?T4|-llm&=(shtxPNMEH?VmiL@7*`_d^i<|C2RZD zMCY?Q;yByjdWca~2Xkjy{JzB)0Gw4F-P%YM=+t^ql-rJvRisND$ef(NWW#=x#BNYh z)uK9(@bO{xT)h!)zW5DhV%ES3(}BqS@j2I9`@YeJvFQopJ0i@(!i+bkk1&37O1%Or zoIqs$9u0sq+FHv4V`nH(b{Y7I~PNDeG(2vD~l&A#LraIqDz=-5M`Lfd6`qX^#L+JiGL5AgWq=xrS*8m3x3;7>H+<{ zCV7ko2DzKT*%K&$pCTXaLut2ROoeT3S2CxW%)A@}ZmAF=%4#L3l$v|O%u!GB7c@xh zQOxR2X=iM3-EFem)nSfc2|jsC%WXt6V9>t!uy$_|-dz05R&B^gj~CjT{5A`9;61r) z#ehRTYirX;?t#;IH-oV8(2u_kN&71DWUa2h|V52NXxvfg2E!z>t8?)yZmRJiGN73fr*sm}KM z50@NU;H+OTZaUCy;PmqFTCOXplVA$NMG3ujiBi#f+7Y-hW`K^6PBS;bNjT`X-#B1fAHY> zQRczt?e9$t5ax28qT_Z65$j!cJ))FUSMxIsw~0ITdXWm`?!oM_+NO^r;AQ+B@Yd`8 zGMl;PE$}w>B6sVJ%gfh_ed}Hr9oRh$UwO&eyMe%sNce@|r|AL4b-u|FMmQ55F>gW> zGCX8*-B)M2NLiy_A6Y+7FmuoRg5$QfV%737#XBW88*bc+w$al8meve~V zd9L4KjWW__y=A&Zbzyl?)|~*JCw@V-M}3t*ouHPm5Z@4Q#2Hks+raOVbV}7GqnDqV zub11Q@)rGy@?z(|HW{FF4-C}9=@vzFF4{Wqhof=NGu?(t*5z9u!`Ly%4}mj7%@xva%J)Rz=uCW;0u|qL7p4usM1MH1l<+83cijSlmRV~n4Hlev?@x!XlGmHIK z=m&jHAC)%WbwA09^Y0!gEY!eeqgo`dw~$lCdw1sb!N6cWYL}^??y-*Ex(kzIZ2B5I zN!E^NwMv~uJ2?g}Jd=?k|CeaxTg)d~*v-NYrIm^PEA~-UpR`|}R>*bH>258Ng}ls3 zMm8l%LkKo!dbb~6vT^U4UaqAydp=T&9;XL*HGiVkr=IBhp&GUNTdg!e@c{k!PV9*w 
ziL3sUc9p#M*J`#8xf0{p7QF)5e1DK@>Vd`ApKZZi5z3>Wype7lD}>vZ&x?Ep+OXPsuGvvkBsZ_Wq(b+~3NDxW#o3t10e^gz&0&C%80=LwrKvtQb|xKY&3sWvkfz16<^ zGpDoeKR=#{X&djP4Q@vcS!12Xqh~ORTzJ=JyR3Kr`fC z!AT>@n*Yp_Kl{%0mI-n;&-KSsK{<=NepA~SR|JYij03XtJ}-fajn@aMN3>J?vq_LG zt9ul}hNBbGUQJZ+$Xl3f5|MXjaLY<;ryIxt(KPCU!WNO~+*4qhis-es$IlQBP?M&R zs!*{E%|X$1In;|@14}h$`Lg8F?egQM5Y+vdK!pgz6KSkoRTdN!H77?n=quibw9%mMpv9WzK2+#4J+d;la6W!>W00c`y^CUFPzI{d1H^jesUe1_C8^rz zIbYXvH9B}bu5G)sLbH36of?N9rz#EWwVy=$N3ajAiifz98GH2O1mrDL&}P3#8qhj~ zrC(NW=8+9agg_A~wQ)(=FZ!jMcv>qc)Y(r>9v=SWXVrtFlB>YxYiFEuZ*6RruZAtz zDV*O%zhD;yiV8#S6yn3@@nqiau_I%^c+VW&#+%nxHim!i&b&5cM}^_5Fd>dh3?=m> zjpZ9)6w;Q&jT%yq5=gU)&0MlXI=IN{5*ENb?J;o(|3W{Ik^bXHEK-f~!$f>ueCL<> zWk7v1dYQzHTgMNlt)pOAsAt=5HL`+{k;Dn?d&Nh?Wt&hn_>gJXyaW0qaf09#CXWry z86s`GAEROjH>z;u`s2uH|uXR z9UmN8;gE5pa&qZs;S_JG1X+p_cF60BTl;$JQ}6+HGmq@%H8)2)818NM!$>Lu(*u$% z`tf_COiJGUentCdA=v8)ZSY8VZ$L&UO*3zcf9@5EogHMs$m)5gwvmj zoYV_L+fZ;(?VD6FLu=QM>44tfzvD^O9?W!q$S0=7ZI1(R4YsxYJ(-@q9S#0||EP6U zDTmwTt_mI_s@5SY$P_`VXML5k*F0(cWem=nQj}=7+Ke(RX?xuopc%@M#0Q0=p{=W` ztgXUb)tGk=m?58uY{WabTa@PLXLo3=B!|ry`N<&lyzzdE|5k-aa?q``VZgyPj5>}q z7e@)$VbQ2-ntN7%Ac)yT#dJe321t>QtVA2>q&_PM*ag#iGK)E(qhg%<4K!lmm@?u_ z9W^_MlEUy+ABacc||N8P>0!{BW(;8mTmls5Z~8wgucWx@kkj<HBdDq+JINjTzg@k+z& z`oqy}a*3NX#@xQWjfjX$YrVI?+c^hysE^Z0U_oX*c$KSi8snNFqn6PzxmcW3rZJY4 zjGTDGFMuM`4c&tfgj^G8Ti+NE`vzgojDfG)c~^Mx;Pw-jh!J!gbM~4cp0-JSZ}tVu z2)mTOQ>kb62q?uSBXQA6&o~97qpt@%QDp6P(PC9+gKO?@pRxLeyi^N zqkZ5tc1PssDusT@+vwZKW6BQ75ydTtc3r(=?0H^SznXcev1dFtj&jCkf}w9QYYvoGZqtrgOZixhBoSN;!^%d&#UL|GNH~V23yPRsXc$pkv~tk6t#JY1?V1-}7+wmHm_v+J9F|m3`w+|Qt`TxLE@U`DQ!-M1r5*Rm>Q3^M zF^$LrMV9t$luR`yw;_*WDXu2&DmFAIan>}7I&D04#^m?fC`zeuS^E2zxEfTQm7P^= zDwy*z3X+(SZ>z7Z+;uf$viG`w7-p(BGd|rOH%P&cOV(8vRcADWsp^!p&rNE!O0K7@ znIBbF44#ha#JMEAS}f1qj^*hj(4)60U2ELxRnI(&hV_`(12ypKjgwlK(*hLvcXY+- z2ZsqX?9-0D#kuca$^Ct?A0T+0SP;K1iQ6QII-ZgrxDhIgth6(f~inT9ZbNW8;tEaB(Tj&*X{o;%KuNUss+`{BLgNvvfCkF>&DXsaIMGKNFf_ z5x^)Hrq1Q* zytn4as}^(t6@ne5<280Fm!Vx!*C)2$<*ks$T(xpIe zU2t2u)g(jZP*Y(1(5yrCZ{=+w7McQWItnydNPnLw$6HsTa~Rz|OIMkSKCP6e?a$$; zX9;%Um3?`BSV5Aw2z`hH1zV2V@W|ocKlKZKH2Ucl>GbK75e>)S{HFhL*W*z=5>~EJ zz*uydyr}&tL>qUt#UpWa=v8OPY~RWUfh!8b(?L#k4F0r2FeyAi&oL?Mwc2{4+*$8b*Q-#SPgL$K! 
zwws*byFS$$!C>1?U5Wk3Y`eK@nQw#FGJShl$=9ldJfywepv#kJak>4qKHC0pOuZns z0l@S#Xe14tH6<||vNNv2U6DIh0l>0}+JrsJZMSS8Z$I@b`|4rf8wNG%JkuRfat)ts z-)WS=SEH&Nv1m-sv4gfg?5HG3)k|^*$^6_RNtaXrP}?C3Wb5OKT$>?q=bQ4^OiHt; z6|NJ4TTd7Hyj2xmJClCdDHC)3!K1k4Y=^q>u{wTxCqiv#Wz{AZ~;5G2xqVh%J4AI|Dh<(7E$6;RZ2+N_e+{0s#cNVsUlw2;WQ%Czcr%RpzAup;7F#t zFloy0!37X)DdRaTIM#|cVU;Ok9nz~*lVmL!7=OIQWg2Bh)U_grxQ-@eUi0?N0n{16 zgob7SSpjVp*nlFFJglles-0zRT?0^EuA+I#dJTWge${$pd*mABs)v1lbH!;8(0eMl zk5w05{e%z7} zA4$350+qvNKEfj+e`E;apFYxRb;OJrvgzocJjM<&LXxwe#-ZdSa$`~4&61sjo)*EjUin@Vt{KQA|=`Nlf z-6?4a3H@A+q*#qh?V}bHVqz(dtUxe{hcl87|J20*N!0^-EYjt>Rx?``X5Xn5B}r&{ zBRBHr+#9$^-i$2TvJ0ElDuV)DKHUpFE394OdIx3-kx0!2Uh2%q1p>tT8)dmj_zj8$ zO5|zfij>y7nvbkfqJiHqMf#T5vr%0eXC-=P#TXR|bOn=Z&$Q-wB}#E&fHgD?y} zA=BY+p{^glekm}+$MyAB{_fT_3iaka-D6RQTwN8D!wPP(EO66&DNI{{?TB_ zu0A{nBZ3}xN+CXl-#kLiHcp;c-Z8g3g8)bP5j%>Mhd@%ZMCh~7HASugVVCMpDL-+X zE1%|+V#hzv<>T(W3MuuU$X1r-~FL*#xma6t3?s+nHmhq z`BTuI)R-fo`l9VTmT3hsHiXJ6nRaAbaIpNZ=={Q}ar&&!$uDYtWo)r+3jNXx?m$@& zJPVO6_+(8quWmAFZJ1W2o$D{ED2>TXtr;#KJTHKKLi-W#XSVt$jBrIuxg(hkgi4_m ztTmez*xS&S2#AfeewVK8{>^!#GWsChFBDpDNRMT%%}$)x<@YwO#WxV=>pweime9_J zC##Ri8yRcqo>C@HHZl4#F)KnVutA|9>~ag;+(E?*&CHm*cul#XFBSbIDY0LrByt5r zs_^;wkb1xZMUYJo!Xir7k%UB5N4sg7@8osZakERg5q%6um0GK?E{xgJiqueVD&3^} z(wXzB4+Uz_>IGrE1C(s3Z4k%pG(Tw&9c_1Ce=QZZh5n}Mj4t4ldrWFB*3meqywbBU z?TimRX;jhzMj$DQO0#i?gUc?keEOx)bw=5BC`uWcT|f!5M!eBySO{=&BUXmPdLPdc1*mN-SZEkmbb>c9JbRoTLQtI{S}<>K6|F&8$=}Kz_xWWKb!shV`f_>s_6QC zp30zVH_orNDc_pQ+R7pmlfA0=R`eF=tI!MjAh9X9l22|=#!t%ePJb>&bI0QWZZGv} z&+Ja^R=3x3_`0}bP+G^+na*g09i}{LefarVm?WiRW)5~&3>%ZbGKww+V^#m88>auN zz1f;wHPJraq^q8!TObCcR8o|s6Swv>)N8D;F*;&tvb19TyFvG9lPOqEEqT0K zh!&ZuStil2$~604ocB4p1KBRLXUBzIyE$j7kF+$`VA~ez!LImwWWlrQ;U{LJn{S_h z>CZilRK`3L-wK)V3tc1K#$CN8YM?@9Z%RFs|5e_XTXIW;+*L{?(D_ez(<~jW8WQN^NOB%3=H0C21e@8` z3*3zwD+CO^Dhw62IS)M3L;c*9x~^KAs`ZUlg~e>9od_Umr6U7eFagJLcFQ2b<-5d? 
z$T{LCtQs<-+ds6!i^&g%goO;-$HR8;UvhV837U5E=?}uzzumT@dB1jbw?1z-+s4n- zHp8R%yLed(h}qvxRaB%u>iKgm3po{!*wA5k>~wP_j19u~YQ>N`JP|Et8MSpMY`F57Z{Bz!~b(wMAvf z63iRrT$2RPNeJMl{v1C4C@(*6#13-Yq~@0nz8OFmNM2-8hWWU+#&*JP0VUB2mo4Wl z`Af5$+;(rI@e^uV8g327Idu51)o-nxPov;uq)`%ouKiL$OdW{9ph+uXDmK^4d{tlVR>9Fz>navRb*E_tY)WWwCf zqTscd#8lv@R0X2fi8Y5S>`;c>9)eZmD3s5hG%AF$I!XrD6&uRsYKWyjydI)JSi_3i z9wT=w0XfuntF=!HCDBMdU+Sgse>|*?fViJ0F<4(CF4w*#E!M&)XvJRqDA7IVa$EkpxVJ<*)yO7~j#YHV|!-5}=ooMWC9 zo}#IKg>Ct5l5^z4$_v43zHFty>a^9K6N#zl98&c>NRFT40C6CxJXvruW`ZI7XGT=P zfITsDu7N8|-J2oQC8>}QdDcEGk5DOc;V>G0Fk_Cu&t$0SayEWfL-41`Hr>!S*n)#1oHm}Gk?C zB&3wXrVd~O=g7IaB1c-TIfIR?SkWb!9EdULCL`}Kt>{A?W;BYo9`u%@GWUuZ!zVc$ z%q|g%<~($CB_@Z7vqrA<Ai;k4N=)lij*HI80y$;Gmn*j%{kYia?XKQEk7*nA$mw8r7gC;FGlPQ(b@q0ThGfVxR(VG-Fb>*ut5vqQdoZ zqZ}=8@EiwwImmS_?)o29NjN+*qOU@_PDezJ>+L9N*HRtw=okKidE zPZ6J^>)EQOjnNK>{A{{ff2&TuFE1>N#4WxB?QQK8YvpY`#i9(ao?;93q5?coUj%r4 zZrK@u_3?$4Vg$7Fc?A~oc5g(8KvzB5bg-QfK+3{cCe|WDV8$Y9u|dXO z6Q3Uy4{<3A5q+(m(zc*FE5rsum#!xW1p}nMj4RFo63IMxfkD8J4pGa$($Kc3|8ueQ zFWz6jrlcjPY(mQ(J9NiY3nN}z$SpVmokKUYbii*m^(Wdct4$Ojpa2+t)Sh zy`jFosUy>xZ(g@QEnlwho_*==g?`T2+5W(iufuX@xM`qxML$ZJ=Pt~hrB7PE3WA^k zesGaHj>jyR0|d+l7QX|CHtheR1B(<)6%Gtke28=~Q#kPXPJF(@A%*}$S$#N__4rLI zI?W#dl4iB58!3+DAEnpzY63z${dIj(coDfhUK&PVW}=Vip?xe8}NQXdXf-h&m>5?^NyYovJPtk<6V!0zS%~U?;eDWyX@| za+cea#I5uW=8N<-v?nvF0>gGG0-~&>fNK1z zDYConLN0^61VzD(8>OcLfKn@bXnc_3tP-hyTj%kLV_0pyRuz9_#*#~FIgck;o5IAn zg$s8-^p#(K=e{R2T7!1gPPmQ4X8sZg;$VF zV9cpe6j#Q5NCTtJ&rjgE6>XtWAVEPwFPC1JCSW>+SF;@VlG({y8k_SVjx8K=uD4JT zUmvr^vWD&BHb@&H+>MqUmQgUOj!QpLPAgO3O^bB_Tn^WXYa{*f0dXL*LmY|R>;0;4 zzh}SrIDC{k8h;8t4}Z%2H2e;KHuA1=9*Qj0?OpG^PrgqXjh~Bi9tHkjZVD*qB_mn_ zQGjXQK-d{jM^y@_f~t_B`H;H*k~-Bn)v4YPJ>!P%p9VE6haT^>F@F&^X1lkD9@4J9 zEIVM2W1oX^WCcO-QN4BWP#vtRK!@o)$3nmgZ1wxR z7PhOt>m>EHYH46(c-3@yWO!z{e0G>XPuEM+)59b{ob}e+(6%PJC3;Jg{(2O#{_sZR zKke=9upJJUF|+^*448(2AQmOWt|xp&iy#ysP6`Fgzmvy>Vr@2l<3>0vJ?(Z|Rh6~| zE8M%|gI11?R`|%+AYB8a_oIXzt9q)C2f5lxt0a+A^<0%15n0mR+G}@z>um_f_x$gc zriC$&Jsw}YbVCZ^BQ#RTb0Nr0z2eb_w^4ydZ8<`6R)#cGIy4- zAkLFPGc|IFUP@%i+)A1i2?Y5n90@dm5)Bdx4B8cviedCABo$N0+34tpW;M?e@fAws z8T$W#C>9TRs!R4_G8BBK)rImbKhThwo0@2%f-o0m(F@j8&+AC@OcE#~-7`;!S9{S_ zK(W&_zBDovg=TahN=0Qm`WSnVuwszG6o~H7k6VRtyU~`y?sN>PBKK7)J%rQPPz zPNhe=R=L48sNA6(0*93EaVNPK8|?|+mvmg_jro=YqRHT8kyvzzj4rb?O#w1$auaN7 zl4-jP?5ab-qQ@H;2#yAi25C9CKS%}NOZT!keNScbxP5wgf$hq4?W*OqtEOj0%AGTv z_y^bfN6;f9!W9VP0^uU@)+Ka?PihH1tpJ@tn-r%hu+h>)s20}N8nk177{5#PfEOu%!bQ5R>VS|XUhv>*RSoOZPrF9DADH^^ z$2)rv?bhi|=xKJVL8;k3doIIvZpm&~y7}1P<{OtSy6}^qz~!sH`)$I$7hZpOdBhVR zehI#|WT@D4)6ahXpBVWo5eu)Oj{!f3(&LvPKg9=8q;=qhK)^+s+FWhGFagAqE))O) z{)*t-Bw=9OF+Cm+>H)i?c{sd!fWqq##_`o0;cRps$5iHCC0C%%&!5I#!?f7#gl@3< zM@YlyE0@az{zwR9PfcEw0nxxHIE+Y+o}c$2H&OkI)m0A-80rG2aK|_ra05v69_ASC ze&##O7(?Sf#i2*T9-F|h_xt4-dQRL$PmA0GKP_rdeuxgaTypH=;z2)k8taGh3*|D( z`tZG?JH>8chQ0k_IXnOc{IAlMuo6KMGg1tik)li%UUhZnc^L;DnLsg>Cx_QGX7Umn zwr=oU8|)8l5;xazkhZd%m3LSISF(GlFSC0b_X@kCk5EsF6TVld-@1P%oTvUT?eh+B z1N;zry1mxtxu3buaVUOr&U+}@iv5X2{?y%OT}Cap_Q-uypY?ibgxc%d8$RHB)cUBc z!cSO_*?vm>7d7QLZ}an~IS9DZ95swvJa+rhpdI5_?oP%Jj9>uYs?S^W_6K$a4h7Bx zm_R`K4PHYwcN&Ey2Fq(7ZeBC5@D}kHUn4WS*lkms*DW2uO=L#@jFUA$!WO@x3UQI~@m*7%d^f?!F;XARSv2=CC zx@wvUr7x>yhOuB8E~91| zhH)1E-{`{|Nn%45`HzrF0f+vYJzjK}%|$1A;ja>mrEr-5H<(Mtr0SkjeXq7>tM1vV zdseb%x~#>35H5xlPtl29gfc#sVQj=)!Ets^093d)Ck6uny9a5r_r?c3oSmug^4OsL zVqBb0DST|TI321*NH30%nwqGVvwsJ-Zr!tKce5P$`Gb$W_m>k7y*RT6K4B5UTiVv# zO)Y$R`}Qrj`}e*9;cwrA+%LY@v7w>eybD*gdH|sBuzVTl)Mb~jg4RspVY7+DS~IC7 zNIDc;EaY8j$m4zJb>Z1;FLrE~mrzD^sRxV0pB2SC8{eSCLIAka?h2H~yewXmJUuB) 
zmQGCz)779ii4%YqgctGe3uK)@jQ~ypH@OC&OU-DSZ9t#Iry+4*$YRfhBoiRF@tSET zUQCXo{oj#{z~yRgnh$ZW;|==rr%qv|jooimW~;y=e(P`S*Qp5a8jtb!*TeA`SD|@Rj>itgo{Ld2PlFbMdM`@& zjmEUs!}6R>K#oy?S3k86CHWQVgE82sSD;`zQ)%dRdxZPlZrFf9{?x!gp0phu)wWcs zwhb+$HG>f~zuyJ1YxKK@T$8TTF4h%pdWL3ct~S+E1@o$D1bLFpccSL4^4SqmW$Wym z8R;y|plm){TRGs>lKx;q3na8)Dgu)Jh6tQrD33xJKraN) zPp2*qkBx1Z82z3t2#*DmAubg>&w7#jKA+_XsYSR^+d!M~^NppLwpnlfp zYq{~&%dbEoVbSa|`u8ZaSO~5J8|fJ`lkx>`u=ksZ18p>D7B*8m8h5OrKqH%Buf9)V zN(*{6Z(DF{VrcU|W}oHm&^_Y5!o7>{Ub=6^7kj=D`i6L*r^1}HjEBa>m+~*InB07N z^VH^Zo25D>kP{02HhHt<-}vsfk_3Wuo7ydbaM#8EUf=5T`K|n@2E7`trFl`@)36!+ zjuLK7yVqViq#f0s(`c;%4^C{*M^RddPBoqQT<;!Li!M>oxu&?k8+M!M zXt#+DbvI#`>n00QAaDA%@o*Q9IvzYlP+$*q!LAD3VmiXzwrm*ohDXCx_=nW50gIe+ z73f57*;p>T8m?|?a<6)pHc)(xp=}YYq773P47knMXB;wUL&Wjia9~$53dIaP+6Vja zvpJEMqt4IA1wZNhk<_jFaFNC4L=M=e$tjp3*bUX?_oZM@YA7|CI-O!tE_`NczOIZq ze=@z8Xi~Q-n~lxp=EG>rEt~PB5xXP5*|qP1WpEj(cr9yDg3uit3jPuWm&)8VoHC;VjaWD0LU6Ye8t{EgX_c!Zcn9 zg;dRmfX_!Kqo_3gOZALOp|~`1ei|?6$49g?8agsEUj?rf!AUWEzP@(HiuD~!8w!zV zNQ9Pz*4mP5$+yt#Wr?0dMr%y0)A}MXvM?3_D+;R=a5*d~V3DO10lm$uBH(JhPk~Fs zWf8bGxjq8dt&es{=yYizSlQC8z!lwvHk0a7@N6X08G%=2S4Y5_^lAkx4Ru9GQ9+-d zzpZUP{(l@b>UWVJI8w&Jb(n-w)0PnsMhk)$*X7O~_ts`{kMsTt_kpPmBB&z3_VciZ zhb10nc_{PH%|kEEvu0a&6Oh7P74FQ$U^v{(c9I^;t!{GTWf0YPNQS3QIMsh`x%NZ2 zXMP5ktY5-44K^OHrI4ho79zj3YDbck=Fl&>3RWkGNy#Ibfb6AvXur_6>D1x72Y#fx zXx2iz_1oHCcx=h?CRsHiL;t?0yzMjJy72t&6?RX7>(A>&80g-*B;UL8`lY$q53@$c z)*p^Pnae-)2D~c$^^IS8!L+c}P@T=fUNJO!!k;MmJqpLr7OQjU>ftT-uWxM=MeTCy z7P&=^UrX)TamT^+myg_WXw&~=>}#Ogwyres0FnSfkO24#fPbJ!5QO+AiKHaTY6K3Y+ov06{o&9;+B-gh5JS?3`U*;9#Uf8 zDghV@N*?C&G&JNh^@dqS8J8f?+cTt(Oj&_G$`BZWR$O_PxB$!Tk&G)BY>_|)(q9>2 z8+4e*Kp==W18)VqnN%Ia0OzKyhi|g&t|4 z=(L>0=JCQR%Xn8k2m`-C_V^RcI_?y@lue$11D*f^o`43v<8e6H*&PxO835J_ADXXFyW)ljxmmllC>*lcAbwxB~;OSn0Y5zi;w zY67vG#wGzgv^AQR-^P?`v8=ygbkEwY>g z9|*6>VQLWj7`L|g*DbRdZQb{o;p->kp#a}yFJBX}dw3?jwyXQj&5`E%<$(`m!-|;J zBGmv2x*uJ-?NjS^eAxcZ^55>%l|(LQ=B4Y=+ArTyt#4aS+)_#Bav{Ed2lD~Pga&(g z5bsz#x_ndzz3dB;CtHK__4eR=2@moRR8Bk`(1EAJ z6T-;$6+F?L9hZJK-ts0nk+idT0AW2|TmXq|j~l;EMofn6p`VjmwzMk4!%oFk`}#cF zZEzwU^U(026s2c6*bv;_yLJNJ>GkHaelmLeGz=0y89i`k&gfAZZbEWJC?hcbL+X$9Bsoc2a|YDPo0$g15R1)a>}eh zO1q+Jbv&Fxn>+kgDu@S>sSfL@%vwPeLeuzGniSO`u%RG7?M2=u;DY1VHK4G?p$2o8 zAox{(PO*zkg~a`O2kO%b8dcDQGO0`}$CQhTTREOR{#Ru1kThf%F&GXkI*=Qq0mxlg zN$7Q^O*}9hr~Dbp!*!pw@kOw5C`^V#s0F5nv_%*ZtFlZ(E7ITFyOQsVrQhmYv!=6i zaLt2i_we#{*Hz+PHl-yB0TgyW3I_%|JJ&2{mh>G>EbiLic695PO1diMCMo*z?aS-Y z5%&=++ZMXGYVPnALd5I~rQuh;bP|+4skDUn^|lcI)DAgP?2G^&n*Hgy%gf{~xN?EK z1y}wjc}p7J;)S=QDUW4>R0{BH62`v6(afYNXf6N~2bcDg)&|oUXSWLrG;K(*{rox@#eJ-7z){&^A z3H_LUQD=0W_*>^L{}$WG;B7tLS^S3#NCwHDYejh+D$Tl&tbJqaB`hNtCW517F#Ld4%<7C3W!M~A*K^36^H{qLv4!6AK?##YgI@xTKfYTD-;I@i&&JVUOe`pUELWiF5j9_ zD$y&Fc7z=6EDkQ~?|<~|^VepxJ$`nlvGXtJ&wZtsA<~B6F>@{fYtlaE_p4;Bs1f*~ zlD$JvlF1%9WF^55(h`L|#wKqPd4h^vbU;e4SxKW7JRw;N84m$06@ou1Q4qvR8>dv- zIHdx*89JyyQiDXABqXO@c|N8Y1@b{)m^_2gN}g(9ISuv_w9?y0F)$)AW@0(}5S57sR*dx5P#H zH$iSl9#V$V{rXUIy?B7XN93w>r=%NtzS`_Z{Sq6Iwxb)Qn{-!J+KG0GZ%DtD+?(W$ z>F2%A^Z$!?$GlM~olK|K(btL|pDc#_nvfJzfwbpFX1nV~_n35(d{fAyipgXueIxB^ zO(fM`g~&gW7~X8)@8|&m3HRf1#Cudz5b#I0&=qmDW-1UvhSy*kkeLBj{)Nm8l}fY! 
z3a%os3h>lfJf*D!aPb6YZ=Y?k(VTe@1v;L9J*kSD8i3?KHh_+UsV;}*Ck8| z^hE)k6J8hSNnsx1Z?V{nf)p*?L`@8>s9d#5Riw&nWxjH*;vTJ_smgSPu1xed=g=ol zWWM#sL=Ege0DL-(Zm*XP0N)(^lH>aauVjcX!l+l_mcjJHf+Gz=zPLkyBFuBBz|sdw zYff23gE|by7FZStPzMg|hw$71qUO{Zqgu~hN zK_NFqr|>!jP+p#t31z+57TGa`)r{>!lFpd;dlPuCLs{oNh8AVa0->y{Cci>i?`c!k zkqp)rP;DETh{waSA!3cy+8OAmy{JV`-xcHsDrnnVn*}bDM~~j{@!_}MzOADxr(Uys zT|Qn|{-s*kx?EYGjrhcXu0=X!B)K1bfB%oy1_dFUq;;LHta)|$AMejp1AH!rBB59f zeQf#MSigdDIoTJ>e2}?zW?m zXaPKa+`{qbDhTBm9z%myK+`zdPyCVuGo6f3 zZk8ihiRD*TLqS)Aqqfl8uhus#4-HkcKw8lXGLqeozBjz1KS_WPV;|i>Kt7q7JYCEa z)J_+-YG#U!5es^Zc)epX`O)R=BVFw|qzis(_+h&UEfRkHt|B?B*xk3jeb)N+S?ft= z7@W0!c*8I`Z<=BBRq67QY3FLlD8N82$K&Q z`XqQ;A29F;gS#&Q9rqE#9!~9}$#Ig1)+dubWYUKWv?-?#y{i?`e%|Rr?p$G~oB9{$>s2hRLg8@@jYl-^Z^`J3;bYec&2_=03+qLMrc&qkVn5MgTWJ<4AjK z;mYsG(UFm{U9BM{uk0m!v6f+4ePtRlo&sT(XS-uGF>{F)Ms^CpF<5>?6WGxTz^*|n(UkHqx+yOx^?GpZe%3A zb#yB|wRL(cO>LF7(p&L}KNXJFw@&ODo1<^W^WY(64(<6Yfy67kr%T{#IPVmL*8?_& z1T6eFME)}T!~qIc{DClr&F^5>^;FfU zdjm-6>Al^`2HtQF&T;&Sy*@0I3JGA;$b3Z@QGJd#;$QJew5BE!!(iFi72UJa%&u}) zTm_|TXmn4o{71DN4@5rp=$1|UGf_X^`+?=b(3(t)cg4*ejk`u@Ix?_+xqGDPb7xB1 zdK)*E)$T3JYleC>0=P{Pg^TosJz~Cd&+bod*|L4$f#px^(4*;GE+%E=QS_&i6}z#~ zS6tpg92$6%y8*X#+sRVja^&XTcrF)TvmM>?m68L@vHlY%iuu1-|J4}RYW>$B`mc*1 z^KP;aaAGtIilhRsvdNsq5k=O@7baSSixNj;D{n;jLjt)&FOGu8KM$uzsU&%Gl5~_L zT_i2yXS4`}x7twMa=;>~yxYo?z#5T8W#sU=tXINIF{_k}=Vw614usa)E z1GGk0W3(~Rm~2cp++`QCNqq{xnr(Py8|NDIYy(Z;hWQ4QU!18@N(<*U5irdh=2lszwIXNCp z;<34554!1_8#OXAX*@BUXU^b}DN;$gylQMFFc5W!wggZ*_NWBZenE4=XoG`Lu|v@4 zBB!}x2IrFu%9(*3s-)vxxk9(`!f`lHgEz-P3N=@l$Y@xQ2!@mhfOHYO0acePB?t~} zhD>=Nj(E zgf26aMlVEupwV_3ZQ|Nj6C{fn6|wEI4GXUeorC?TYr-|@ns&{)Jgy4@ghXv&2AgLq zOc7wYgHyY>WwvnRmf6t{-u$GH+Vo`FwdsbNcRdTdet2jhJlqhb+Us`xEu}F%l#2>8 zJ#R^G#aF$=v(Q-Uh9DIZC4)vFPaBCmZ+P;u7}lvI(sUH{;+n$ZTF5WyD9+$r#23>k z)s1&8d%yihcbh^gW(W@t+EOD)`ChJ<2@^ZhUOq$%gbMB)J*KTLeOB zJ|~n0v3W`&G)ju)Vr1bWu0Q1s>BFK=d^ z#0=pFO0#82p@h+d}G*59Vehs$7dBh0h2;%0AGv`t_CiE@W~}d z*P<1^*tr&AtO!#7AioBrxhZXuJ|sONyB;c`HKn07TS^})-6`K$x|_RCzOVEr+&9^` zxZivIU2AsL#_D_PE_)5Ca!kPrhA_NSALl zRSEQTr}=4~p5UkWCwPW`Tc?RDGp_5SK%bty zj;;8snEWl{xm8Pwfr(VyZ?AI5p5p@^nb(j8TiUNg_^c)@*~QT69&&$L*+KNU~~yEgEV0 z4M(>9>4*0}Jo%l?y@j4wbIY=>_L-rGluaoHs(S+;zj4nsAN;VrtE-x0ng?FF@7BG4 z^5ey$hazHm`K?=ODZ@ZfU-us7wy`cHaA^5E`?3SOuK(zn|GxivB?x@s<;^Y%%YKq7 z((YBVujPsA%}2;G9uW^>Q!Vd60BkA35e{e$DT=Iu1I}}Tq_EjQoB;v%(~dL1me{06 zOa-%s;<3hpJ~rUU85Xgj)$V|KBH@mZ&Br@ICC58KZsVOGvzn--c1R2=6H`G~%+WG! 
zm#51ArEsk5>e9M0L!JG_ZIZ3onQfgLi@U^8Z7emKxw&(9ai4UXc3Wm&=L6Dy?NDle z=1}po+MgG{CVpA_TI$Q0zwG>a@$u-lw7*V#w|FM{J$wbP6)zOuD|YJTyN$aGM?zl- zeI-0!W^W9k4lZD&ShM95h$~_$&15wT!EI%Yq{6bEKs-*R(*Y3EDwRglh@L=G=n2Fi zj(9NM&UZ_Z(FpxqfUqj?&ek`Yd#p$M4;dm zxC6z5uaqcanON#@jsHRvaT+`yAm30SOulrw(^};nmKF$;Z+r#y#_fIBOci_R9?_<4 z@iQ*g?nJvgQ5v);v5Ozdnx;OSPnl~e-q$IIbqTqYDTw$;0usg;hGMbhyW4|o5Asyf zov4$NbLn(iM^pN=PE)#sE#Z0nobJ{qtZ%L4(>b>6f%E&Fz@M~uU~yb-WwcVO0)PiF zBE@R)cvMk|m??ZgnBzhx?qUj#3F@qXgd)A|yoa_m8C!{cU*#7ax%U4?v7CHa+{}v(k^9 z*?dKUdNQKPxjLh%V8u;$cC$`5YH*7i-Q~XDO}mXwc5MpnN&SZuoic(x1XuWut${>| zVy#Ex0>m~mgHV`et*13>FF54_u)W7&lNJ`nhol9P=h(^|v%Dp>L7I zaHuq1`s4D!(h+ooKEg~zrnIT}RN|5Hk%}MIPT?7^;7RbM7dw8C-nv26I`z&n23qaHr6Jqbk+TZi<30ReMlX z6-Z_ZN2~C~(mx6!6a+%sZ-B4Jgl*rXh$h5Iaav@=In-;brlwZX9O6ndCYm7W;0)x3 z)8^Air+Se7oe!~h5deCdm)oCXC;&I16QqOf~&1;@rv&C*Qw%|v#MmlO6!vBVbgLZfjkzMpkSe3 ziY>Sks6APLMG31~yvM#Py(_<0aE*_l17x|9UA8)dX6PAa#`jhKbYxnaj!!2Z>-b7` zrYv9xUj#-RF#T*_HCxU7S?Or*XvsYe&vvxsg042b1+|H6zDeUP?$F3J!o1*{72H%J zo3|-Qso+pRhaHBo$Ky>>sLh;1#?Ly1fQwV1W=RP-eFdGq#325NApVG8v!n;%Jr`|J z#HWc(M)Kp&@k0j}?Vuljjvt?cH$^7fA11|K{fC?^*|E6or|F8OMJwH`%6Gcw*f&n@ z?PP-~Tv;`Dwijk`mSky7k+Enj)=u(*g*D|`JMA!Mu3nCXp3da&{qXu7dV2TgfB5V@ zH|@`9Jz^E<0g(JH<@4EA;*>GAy-Of z)=)d{8VV&7s~k%t=VfT=-1zvsbXHmzZ*y0$FvQPLetZGLLUT9TP18fk$K=P<=OWKV z=hQb&^ZB>wu5 zwo$X>q_U4+8V6GDCCF%BI`70+@uBlbhV|nCCpzmyg0*r0jFnSygmoKo5!{D7B1iJN z#g^<6uBo#UWF#vST-R44s4ou;_g=D`_U2b!sTDHU$YyqGZDm*IU-aEwj#;knFaMAA zOMf$Vja9hq_S){-=|9Ru@7S3C2s~2y@_A;7`66Y|yH?3x)Fhr)u4PL2^g=88(!ykV zs%1r=clbBDMo!d%B+?UX3udrwZ{x}(LL>|3+U9jYF+4s!pm>s{fRBZIm8ZbG=6I_5 zvm)e1IA+I%HVuaqke9d$%k5;HL>`9YeY#Hxy1@9!{rG?!*A=6-SC14O^Qj(?B zKqgp1x(V~otm&%z`qBgX#`H$rt#P4kU|nanr3^F6nP`|zacgy-k>uvkIy=NuhJmL% zxSs&;<9)siNj3~nvj~Z363w8OkqZ%uWl+^}!Qg0UIz&(5-E4>f@zh%aip4Yk{GqF{ zBA)6X2a!DbL250eL)Of0uAoJr`@|EXoDj7HCCPCqnP@NW2=q!2+2c8~PR{AQ*l6T^ zf#e(~9LE;6L-naSSaMZbVOv}=d}JG$mJrJjjc0BbGtsmeSpIeS6A!H0y1$g@+lYq8 zhKe8G(!81Z;?hrMNHXE`Q`e4tZVEj%+!IH}($T5W-VvI;zK=E_ii%}<5zDep|LTe? 
zpYnPsE$AUnE0FP~w9n7G!%A&*RK zTRG^VNek8(I;rT;&)MRYe}_0<{n=TGL%?D?rYp*-v6Y1H9phvU#Z8rZ+gF> zenWda{ypv~?_dnP{L@e}1;{$1rgtvusJ9fX}YQ72`w+bL6)Q*PMcl&0yF z*{oB_GAV6aP1MDX2dD$+06pn`K>xJ+v+|J+?*Lcl>q;~Jf@ePSQ;mI?e?&Q~GJU~~ z3LR3ysSp*{Q&fkX(FCW*moR`Bjj`g@*E)GM(P!Pj6%A=`1 zWe(v9Z#R!5KF81Sr}!W9ZvLP*4#FDqQe7sPKd#}x0O;s zxFS6bC{7Cv$~%uuyE*jSDX9ZZbvWM!J`2iIVyK=0Rjo=zY{Xw8Pc_h%ls8~$Amx&_ z2;~7~ab;m3X%Y1G;nspc?iLB=Uwdc*S@~yte;b*QAyPz;jPMk%NBu)s$-Q|Rm)@KY z2%WEEw8R7boewoR9m6HOwOB65aj)8buSNwunopO*4CO3Ti{;JXECZ5(Y`pgRKA{kC zN0AkOJlJAGS+W3-DWfM6rWN_gD>2U3QAfpkIGb30##%lTEu`fh=8HyN&vq?)Xn%h) z;1zv_;gVD9-~Tn^?yX8*4x)4Z%jexE@wh5EnN6*6mCvN)09^vtXn^wO70y*K(jL(R z17c{XT8%ZOrE~aie%0tYL*?3XJ8vG(*PRW%*YN);EMPv%ScW`29@+p%ve-_CPBqc7KI zZlgxfR(2D&A-CS#TG(nm%uY#D@}CQh36_S8ZQ=JHC`>8 ztE4Hni}gfYF|Waz-h#(!#IBRBlSkb*usf6+tVeuDq=%J9)JL)p8xQBFDzU@fhhm5G zjNdzk?v?J9U3m21pVTmT#NzQI#pINvXEQ0CvPvmR3SeA>=T%8cr*)m8p%GCfd6 z{0NV&z$Z`6Qy;OikS!Lm>dWdsVg2~|Too&V_-S1m_P4YEZqjfAKCGPAFpPt({WulP;mQHL zYcE{n)E6Dg5bFs?UjTcKYby?JbxZNze(uWZJu7j8+@Q0l7#erNiAAhfK&#|onO88S zAW<&_8YKywho*)>wWm**}OCkSmo)mzMq+efRKSEWj8B6O*#x$kIFLy#7X7hom97 zKR;Ax*1HBa`L;>hLl+`}I79g+^ohoXmKUt(vZ8Ts4FGx=1& zElQ#+meNvMPM0j+id6@AX<)ne=HAhPHk&nvulv4!)DJiEi7KjA@^yuGQ5Cp{R3({g zRw@I{wgRkHo%MK$2Ap^Ha8IAkl}fW#e#f%R+9>|U4>nN4?Pd*?WpfQ9+-Mj=)HJ(l zLbz5F@UO3Ug;>orR9}BJukegeXB!DnswJ=Y+U;TjCDQmesaD_z!5~4FVdk%tbvWuO zqjEWwO7a2*y-)3nqG-jKLxB^zid5)AkQ%l+tG=#YR9(;j^S=5F-AmOd7Jck^qhexx za)PR%+B5X`;h~50))SetBzzLl*&D{Peg8O9H%(iyl~=%#RQFNeg&n@;Yq) zkFhTSbK|P>e6LDp=_r+?l1eJ6bX1jYsdTHQ?v~VdyWO_iP22G$+p*&##)%J14ksqe zB#sl3;BW;dsxln+~>GHn7!L)>)0C4`XoQ@x17txvKFdgFb9Ek3Z7EF5I0XCCO=ZyIb?~TNy4&NRrm+ zzw_#qgB3h3sxwah2FbZRv0;UrLsoP$L4v!YFzQF1jT#rn**oVLbrVk$d5%yw3A~wC zH}e-3ou1@E@|Wn{P%gC=1z=C^2*xDvI~n459_EqV33TFdd=1*^EC5g}ESX$CY0V=) zSCur~#~xp|t)AQTD&VjFKj69gYv4s{hO+gB4?b)p-oPu*p>o4VU`r}r3tSL$KvVb2%9 z7mT0xK9PI{yk7jNPDB<_GMP#wD5!cA9_)ss@;x^sVhDP?-fV($q93G^N!0;KCeSGf zuTeu8QfTc#c8)>pxgzTHp;mOaQAMo!>7v{1^kGR@e=A-ce?0;?X~38o(o1taP~`Y4ju7N5?vO_#g3@-Dt8P_ks&q8&_^W`Os)8QE<`q&1Z-g0~yh5 zGlOoW2J#?6JPUuM&Z3~$4H`~!Msvn;#`=`zf<}MF4K41ET5{zXu!){wNnYc&I_T}1 zS^90w>$JXyuU6`~HTKtt4z(%vO`#%f#w|L~Z25oBPQ<%3vu{3edFP zIFQc!B1h)#xVee$*#m%PQ2R;Hg#V&S^F@~!@-M`7E*y=v09}7E4X7X$pxrc1W5h18 z`dm|uVA5JO-&Kq2V@YaUU7a*as~sK!ZuJ*#b*!gb4ZXYLayMa3hi3r!CpD6ZHh!T# z$(H5ep=;)(Ssux$J~LJZR!mW|;jnVJNK7fs@+Xu}l%GTg@7YtBFDyL{|w4^zseGc)i_wexdhKb>(TZ$Z#Ps@b- z>8NLD_yXMeV(95^nj_*Cwb*m7n1;$W>KJZ38x7{4g@q$bcf1s49$a9h8{~8iG!m(L zW3WN!l97Q_GYA@q{-B!kkx)lz;D_e?`C<{IDJtEMfc?_JG;qbDxb!7;5X5wTlYtC| z)BsIzF{!+ujx1VCb`0e!cz(4-lcGB!``SWI-HB?{d1_p}XYbMOL^a6zfQR6HheN4S ztJAm3=cEkj<8PadzEm*LF-zn1=bzv6+KG*ij-&(OY&9hN5`{euFZo$ryLPC7Op2ti z5B{{}pqchF2ZJ2rlcY5d5Ob@~U%I2ZIU3H0>lK!sElr-qBR<^-r45AA2q?lIVw_%p z`IB9&zT{4D9I)btN{kL-z*>vCbb~eu>@o3rtIoApp7?G#99^+Yq6jXdLP90()3n97I!*^ZPBF6l!+(H@NgN1 z4VJTl;lXeuE(D`t;;f*5Q1r%yKvZNbsid1EIBz7DaHkU&V6<6CU*=clJ4Ag)dzC0|)8=d6|b1SlAwbsZzH~i zOopeNLSN!rTd=-YXEz|911{LVEEAhKr7ryO(1|ta!4s zGSlqeuFo{%+}EV~1Er8;3XPv!vUT&#UVNN`YJR;Z0AoO$zn?E2=APqz&5=QFh9jD2 zv6Fk8({Q~Uf3BC~&-HTr)w5)ex44{vHYKuA%aBuGM=g36(5pFU7AUF7XC9otuO^>8 zZvM5Iz!_+S`2}|`_Y4mcUKr?A;&DqB(`#ArMKkL%}=jmrldDHiJug}RRpL@Sd zd5puJ|{Aj$FI28M_ZSDBZ1+ zv9c`n#&olpPB$CcDqi6aqWIO9p$xwYe+ZCzeH9Soc>u{5kZdKS#^|#~JO-%jZDwLFfE*`8uHUG8|t%$M8p-V*tN<*|6dq+aGa` z4gBh5TlX9r*bBC3wrbV_J)n`i4S*<6Km|0w1Xu@d0z1Gya0uK9?gQU#?%scJW^QiV zrn~O#8#sI{mfCrXG(BlHj5amEfZo2KG?0>{)Bw53SIjzXHqN(p>W-sFckLY?{@C$y z{`LcQ*X%|@SF3MC@9_35f_KaD16#HnI8N>jQ`ST#6AAAH%3GJJ%B9!U>}Fh(`*r*X z6W1cQUBa6yKkDzG(p}pwcYlA~k8k)r>H()1E)@&;Xm9Q4t=+x#;`jRN_OD-G|GnOI 
z?Y?~7|6jZw`9`)_%zhRxepkp9auVL^RPyNcwL&geAZGEh?ZpR(`(AmX@(HxaqP@SMy4#J&CqHm(3Q5 zVDCpAJ=*v=KJ9-Nv&9VBKuE%~VSEh9!T`8Uk9Vfh+mqCsy8A0a`m`wYRSjeZvUIX{Af1J`Pp`AQB-y{e`!vg!!Uc;l zo?4%J9M88<>t%JVc;)c^?MT)iAb$jSkbB8XJIzpc0j_FNL1)%UIK7bPFOUmo%`|Vp zUfDeQgG#IY)@!!6UPAz4!&ahOR0WHV^O_!5GMI3~(hx63bx{dPaDA>Jj9c7o%B^vFEd8NP<6l4iB=G*i0^No4Dg3A0) zUbD65FJc;^13$*o=)9(eT?$IeUTtBs2Ig`3C@Ud`GlVMWQNOV2h>#V16S`&(Ta|T9 z0iVd$No_cz>7ErT73)Mrr_g^O7eL4oN9rsU9%GKPLqH3@#3Xp zDWN zw{Hh`?%uoC(52GCB}65aOkMJ*bShj=C?xq>EnE9yK|$1-$H&y*Fecfh6$I0%^vaO9 z=W%%jHl&twMPza>0lMcBAj)+ef)Ai1fhp-Omsb$7Sgu$J&9}$)kBjMm-Q$x3v_BN} zcuQ%CvRdGM)d?nP3e=FuOIAackaN8LwlUZ!QysG?6$13Nh;m-F}ozq6-!b<4@R@2aBg&N^p<1W_ujH?gDCC0W&76E_5QV(w1I7d)0AJn{q{`U z;c`Jurc^4Jy!Mb;wsUb& zl#_gyG!*Ud@oP735OwiV)%{6cb%59zC!az5y9MlP<{n%J_pQ5g9r1A%vL^rONjNz)1SP+} zxGgEpNyT)zQWT5X3YgPP%cBDzk;dWdB;~$r)6t-DX#r~qw8SiVjp$s+V_rnkGnZ|( zkZs%*mZe3`aTS*IN9a0kTjsAtj_pcVy~?nNgyb`&9S_W8M$;~a0-R~i7{Ii|oVq)cVT?xZ0p{cT;r{K1PY=Sua(PZVoIMPg!_48( zqn!PIDa4CXD8zv5ho+?mbO$c)-b|m$+;K-WcS4TGN8}vfDZN^q+G|95dA>8R@`|!x zQ>7#B|M~gC`^ZGw5*6?Wc9l^I_-o84f3%1ze30x!q$vC$!ourhBl+wfCGSprkOZxB zeYG;1EbTanh@V;}z7lqeJ>nN`6qV%EA1VJnK%yer{y~VqjW^0o0#E>GIPzbBjO6CU z<}u_HM7tXA*TYV{CiwM{J{X`xmdi$rB_jOPs;Ls_-ZFA1`Z!VcQM?DQkMnglnuwN;St*@>%10$2OUNTSvuaWX?x_z(eBp_{yvv2AQ@SK z1t^ev_) za}_UB{qSZ!M@t@MI6*{nqbaB8R|WvU~D~<^Vu}^{ZMgs(bK^Q`A z!O&7cg(2Go)T8XW6ifvJ5WU?dIoD`xtioyz!RbY}9K|&o8U=(N++V2Um;&+;x)l*F z6;y*7xte&`T?>jCa#0aG1r2_Hg`nfMYM14O%Z*^9$`SInlAd-2Sbk_?N?LtlOJ1x_ z&8!R8?yYg6&utYAK4qkJe9bN2cx=P$Bd^@LW>1(C?37l+FtkP_o^T{mDHophj< zdh3qI_6$epSb>ekQXZ$RHa1?9rcSNt!LBC% zNE+7Cu$p=}Mbx73K@*%Xz;P0e5m1Z!C16Vg6OSjJOlT4bSJ}@LC6^$|j6hRGMfM3` zCCH$%@RpiO)LOs_u=`qc8!(PmI#5vU1x*)3=xf69#E%~zw0Qh&;CHkkWA@NFeJPlRR2r zx;v^KQZb-k*(YS4lC;^8HR137U9IWYV9Ul38m^uv*>rq1KQ}SpmHOMpENZXbTS$^` z{(N**Sl2nur~3SzC!}iJ)j2ODd9x)sGkR4DC{-`TCtXJ%D`xjSmdrg6T8CXrgaVy+UVy1@x_$7&Na**SO zOc>R6%Mf@V1*mW3SRcC1PI!Leq)&gGK)Mkg%+zdtpYkETjezaXW6ObY=)5!MN( zg)@S#(cAc*pcMqUlsD=GeGy^dyySL91(%Giv2N@D+hn+7jx1n2ytT*z^X=XmA)?f! z`HfMO5PKss*slLFq7w$o@PC);LCPE+%y)u(DaCjE`e38l@l+ZCyIyI%wZ`APB5`3*x$70{)mS~j;%ob^zC-Lp*H%EHX=Vdapp(*3kQNkl?U%gX@MRlTH+e<9 z`sMaz+gmt}*}a}qr^9me8ryS)@mUHHJBwAXMq@HLs>7>9cQr4XoPGzx*=YmA8(EGb zjG;ye{*XK?osKqI9?*}b%A^(pl*A*0?>We4gl&2e#P$rK+~qf0U`2z)qPEn%arQt_YNJ0BaJ<4Yp#wr zB52J{Yv&bhV5Hs&W5TK=ek)mzJ0N2U`dO#ft0mtQ39Hbzv3{)cSEFm9CQ_38R)f{% zV&K-!>*7F)lO&(gA0S4Afvg~krqE!a^D79bpeWN;yO9)ocqG4$dN>S9O~%34?OrL! 
zdc#`D8w9u#@4`8|NoEBH>ZgVFMHLzCMe%#7umq1KtwLVYR>C+q8q^WUh5iB8n4FfS zo3315YR~^6&$7u;rTrOq$Z6EaHm>cwCkaZwpBM=ZD!h7kVMPD`KzEl1|FyY)3lATJ z@J^&TygVsp7)Ub#hF~~aW0X7u$0duoNHQcMkJ}~^qVKN&JPQ645W4{!1#&lC~s~jM!;WV7!o*7|QSS_9DS3^8j1H2IwC~x8_@u_Amd(!ljQ-wUh5m|>kQ;d9*w5+La6hC z#uja6JEtoByq5G1RLM8nDdG*$mmiC@|2aC6_eoNyvMNs8bmbM&*M7R#6wL?$nF>!( z1DmQou~#=+^BC&CDEKeUn~ik#^8nrh9s$H=0AmaUjGd7na6+Je9Ks<8Z7>M4kc7}4 zkMg0c%>zBO9UAN$FL;=f-M+VZC`vCFLkH}~?Zj3)oV3HL9oi!TFsdE*#)8^Kd1UK1 z-awn!=!eSo*2Px)vKp#DUtT`db9q`Vu8rMecnxoe*Rl=LbR{SfU$lf9Id~{oj<908 zVhRtHIuBI`e40=;9`~ZosltV>tTp&6nJYgc_3iKD_zhCOxrr39PfWPO(dGx@VH3!CIhSWM^qkAWZZyrl|jIh1`+(T>EJ^IbV z2VZ?;hRvT%;Xc3t$nw4J!JAD2niDM<5qtI=utC7 zn-WwAMe$czT9*7G=a&;QZ8jHKPH<6`qAad3vfJ}rN3Q3_U0gJ_Vb5t(tC%9HRoEL% z?InK=e#1StYww4OgWHdm8oRPaacrR74%Zc)3Xj&{MnfQybS767{L#Uwusq=;-y~_V zv1?*--;h9?I=`0(;w&C5>TlGDsmxH^A$F1)%AgDN&*mB%@~mEu=^O1_(F75FLtqfR z+nk=L!O0R_m4XM7$CJdK=trZ(codeRa4Y~T0eF)DC;YJN+UO$6cDRw*%iO_`B?eY4 zn=Q9m?zU)ZX1G}gA0m(`=fuRXWdFnmKjMSCd~n7GhkP*WgF2tf7x9s&No4ljMG`*J zM~1(geLhRrvcc@*Su&e#gz>q~Z&L6%3jPHJ4^j6~rzw)E$VlV@a6QL1-Bpmc4>cRp#lOU@_AIuymEV?U$-g z$Y9ajT1Kp*PV$Vqiopi9z1j`DsCgzKsMGZTe4V!L8afE!nyzYxE>{_Q){BrPeVxx) z9gM>)By)~VywC`ls8FK_p9!v6KP$&de5zbltPr{r$*jLUY)@oMej(|z`pat+U)ct~ zi#?&vZ>x2`R+5~l(JT>F#!{@Pp#!{sTYqM}C~;Oz=R3Zn;M7U*_sBW6TIpn=;z>}^gTFh^m|d$8R9$#(2)b%328%?oSYI2Hqfs+HHdp@|coh&I1D^!M z4sZw%831Vzz~Kl+{{RDLnO)3LhAc9pNVO^>e-eW;F&K(L5VOUISgfi%QiZK5?60n_ z5>;L=k4#Tbj$jBYG9QTqdF;dB?p^d^TIwiM6~Byqci;wAL;e418hqhIfEn^9B`i|Mi zP@m$odl-{O6zP1ezaVR+v0^Y#oh{~XUYixi_Kn8s*$Ct2OcCPQP7ecVhkxmtOix18haBx-QQg= zxv@P%sq0%ruJ>`et}T8_)HRjwtu+HWyF0Y)mff)*e2>EZ#Lvo&pn;%+{aZ)E(-RHW z?lK`sTFWBqn9q&GI>-C(j#npQ)^D7LPj$@$HZ_*(++?W?k5y$0p9=EjO2rSq5-dku zs?9{9_G?s&_<`nlg@-!c#YcG3ZKxO!ljsZviT68s1I7D8A-|vZGK_)Zyaq3o<-AS~ zeeiW4Vr001`U6Mqko-|y3q0su4F;k9k(DKE7KzZ|bw=Ioa|Gjy1vrz3 z6GfqaL%HoF#|wSD276iFtMBNLc?s&^d(HdrvcN)E&^8)wHSFgq<0tQRn*3GiV8JX?Tw65y)X+SqZi%+9g+5P#f1RuE@v zL5wAE_%0`g9brdwO2IIjpiarjo!I_ywk$V6ylvOWaAz=CMPM$JxlH$;(VuLJuIu z3d1O{3-hv%leYGPD2O<{(2P7APt zp6~^;Bo}xh7q?EC78*(>Cks41F^3F?bTTW3BOtQ~j)2RK_2&pUB728)2Rny!E?X(6 z7K|1!rNH(MxB#@09Dy5$Bk=a-aKHwNoyjM&NZSA|n>&ughm+8C)oi?!jgxFHJ1C2E z%gQ1_>gM8HZhC%hZdO)$N(z~dr>Ca~x^hFvFivPPJ1d9d!^RyvIbLrBGc17}5}0TE z#Zq@P=)*|UjSc9UF%G+Tqc3MF)}76-JGNYQbaZqEI|_7f1o;p*xy&=6#bs~^Sh>=B zZThYV$RX~g8_Nj^OytccZXq03mV=*n`9ynndn*SIZ#LfDr)ySW%=?F0CFeWOBV?RE9_bhCAIv1Q?b zfo`$AL@S#2XXb_2FyI{(`1T3nw;gr_m+R!{!e+q-Ky5mTdz{820!0a2=wEoB${MYL z>QjWSu6DpX)@(a#dJPm469E=H5o|jjU$(Cu>EjEw`M&P%K7qbL!4VO@uEAVR5LoQ^ zcv$0ht{k_vo`6gAM3ia|#k5K;UQUPmp%!a8)tDO=j*a+uv%@kgruq3;95pC5u(@1s zcgOv!{d^t016=T~UZLS6ahK&M?Aq+bPY$$ee&Fd(g!;pTW8>!H8S2@*5BG?S1N%od z6K=_p_{}qJ1`1<1K|wCzqS$@-oWQ6Kw$LUC=uOyXz+SaDCF(;P+{Vh<8uPKmZEW!( z9Jj?WJcw=U#%9|BW4Xf<3br*K%x1gT8Yjqu;D4AP4xvBSYKbampy{Dl_#PcRfIgeR z1JLB?$~@N87aw1!OzL1467s5)v9$TSygspBc0s<(Gp`f@NqJGtzcFXJME6X_?cnoL z=+w@lkr86wfWc#gbL(<3w-g&33--5mPrq^}wyzm^E&_5%0Acx4^o+>UVKBeEb5BZi+eRbT}p&9;d{%UW1}X;XC44 z!S>!EKFw?V!@O*)J;9XXX7PE`7{35VroX?1GvR}O8zr!{@dc9BVmN++ahqx7kb_}O z63l81W(}6bad2i!#xScF79+4?tU9Gj&z|)`8V zZ^?CZ>lqj0XqU$k*cY%f(|7^FRvp|e133i2VSqJWG+a`n;*kf1>HI zbF5QSjOpnhpd1XcK(l|S7Cv(aAGo#zE(u78b&gMHc?~pm#}h38S|0GF-pc1`_F6vH z3eV1pyfYnw3!Z!~*?zv6%}L2zcUwDFx7dsbx0jDc_=g7v3;gvgI|tW_et0s!NnaZnSlF-IvgR#C z0d}qqRzX2*S3g(yZ`e(bB%^$M@|%4Jx_de~1_im=cJM92Fsug+>+!DdfWZhJyc-ys z8Jk&t7H2HZSgx|#YrO}8O*uORzyUk=7O-FIU~rsBfzz4R;PS*R%w6ko*Xs|TBHvS- z5B(nogar-{8WQp}bXT}d#2DVe$fu;9zc$ueuqVMGu_5VY7sr1)P_>1wTaq9Bz0hrg z3GTK-JG4VP{B20F{7Zmnhj#cEgrpQ{il!afp&i&5t(F0;&VEHE#|ExK%p zahXOsR@e{|jx|OyNxE!&D9!-~-7|-2< 
zF0;&VEHFFwRJv@5IlJf4Wh<<+3C9}qay>^1P+f062C|d*zmF>*h9bh@a4VB@GCmbSzP}vofT_R#o+06{! z9qCAwJe*j@WrScj>-d3ITDpMuxxE6zqJ`JJF|8>qb^G@ z0*k>&^dT@QR*osbZ!V_AG~i>vYOyNRk_KuzP=?a~D&HK0QPx>`U}g~>r#4$4*F zn$E!?sa4YF-#)L)4(lvpL;2`#Na-vlj@5tl&ide8#xS7q{GiyrX00Z5?&J@vo} zQc$WyeF*dp!y05!EohUWew1D^BqfXq1(E@3QGxU={fFYIEoRc+lmkXu8L*lf@g}fP zKtqM})S9dth7}=;>P=cF0A3`MgPH zrgf@NT><()>+V^t2QSJGpoWsO6i8BI;cOsG-Dcl#EDvtFwi9e6v;%ti_{%cxuBnKyO( z6M887s!%^8_S>@?uT6F{+P-aXHCiXC+URYRt7Wy-GHP08BBKv^DBo)lmr_$*X%-kp|!&6$C&or%L zy2TPT$c~l&pdXF10bC7gXv-+gBv7Dl$g{;Rg8+uyZ0<1pNj|1BzXEAsobi%cWpNpx zs6`p)?dxl5?Pr{2TI^$-$y#h=o>N-)(xY5K`KpB0`|WeC>>VD}nY7o_D@p?zziK1_ zyly_D|DzFLoZqrA5xSa>Wq?{S2H$K7Te3kL0kWtVTqy#zbkLd(S~>y@34OgI@>DUJ zhq3@(A)4bT8e;IB2kO01j?2IZREM=506!1Vf_+6;FT_Ixs7Vl}7}3rJtvTRVMB~98 zX`rPL)L}UjWkHHx9_U5w7P9F%k&;vj+6j|}t)Uq(cU{PU#{-5*-nhUDOtp3)LPrF7+r6{qVyBe-)YEl`KU&DBMsH0$XDCu*JF^WVH!V)5lC8h|;K+#J{Z;VcgN4}XP6hG)&b6Jd0P5j9ip{N+I7t)@7 z(=O2JLSz#tm&C;RU6iAiz*mQmgtS~qxVwsj3pSN-k3qG)I7=1xyLO zwpu4wf}c`DjZCK`sx=CwjxazyvZX|hO0LxCm7NK_Qb{N)OOy(QQbDMxRzjiF%XO+M zs378@P#R<^wLVI!s#NNUJitY)t(0jXFBxIb$rQ>;nXZD+mcH9g#>S+!G{nlXYPC#9 zgyyQ`IxQ48tVpTTL)HmVBp;yzY=m4O$(5R@BeSOKWHl;H8IfOF3S=iD35h|bQ7dbK z5IPmG8jmPa$qhh-9GOm`)EEeUVr-0w6QQrJs#2?f=A~MVA&Tg&ttKjEwL~?rfdN_@ zHWLOdA=fEo1|^SBsPt9913W^eQ4m!+6}Tz~D3GvBPgE&&l`4Y)kSnQ0mNwec04@Nx z=!}I@$bkockv~l0ROz&eYPo?&z&HYZd9aU>fl5Qvl&j?BW|GwarYenGU9EsYXOv#6 zQP&cosxZp0X6S&zpGixF0ko!0sW<3=-GCEX7{Kl(TAh&vLREl^L0JjCtWyE53T=%> zt(7TSTTe!r3WOg}M+;bi?`lI82xEm3stIw*mFlY276n5~Q%j>mhX8`WKIN(s6_7c~ z-X4ZpsaCDlq5z;x%_B-=dLXM-V+wVn6GF=khN`6KXr(5qMpdDzQYuukD6Ot68rGr# z<^Xz7gaP-Wu+T%nAepz1;w=jdXka z0_Yq)8U;XaU;-uRRi={x%P4q6sSb=<5Iyp8nXU}z3~dZ-1w0D66WS6mS~bupGL&hI zarXCX0R@uj^;)?~2IE7al~-2+kI1Nmpi%?NghGm~v?U~TsyPvcBvL2=K^5hPx8e~s zDnmJJHVZc%9d1y1<7Kr9#3{ua($Z1M7H~j=2x`b9Dzyq#Dg0F;gH%-mh4kfUzyi`G z)o}3YVIv(SK*MODxLygeH=qE0O<5Pok_Qg5s+LHhFX_u=K&ujE ztC1@+Q&b1p>p`M4sDO*W;H3uopBVs-w=5AM$CHNSu}hs>0OloNQ@t9+8nP&4V;E08t@P zAQVfp(+YEhVxpi>T#zpj0ru&DY+iO=h8S=W<%;s8QGhFGBSb}@LP)ZNIXQ@xunto)pG5ok;m0m+1^IUF?(fR+SyHb;&EYaT7yKf%3>2JKTyuv*+6>_!HG`faq6 zGF~^HrYopZa^?)?YUWnvHt^ZR{EoT2{gk}@l>EP8+xAoPzwVTry3^TyQvUCGQciiV z{j|LOw7mVay#2JiZ8mQ|F>ifhZnRGOsd@XUdHbpPzu2j{+5S?7<_Kf`D%x)I+d@=@m2UdCWi8-5a8?3vultB{meLqy$%4_ zZ~qUce?#FD+zPd(j4pfVsd6&-aD}=`4Ub5x)v0+zrcPPGBXVR0jZh~m0bBGpuEBFN z3Iox{5fAX;rh#8KiXFEz*}zS(v^;5Cg~gj-w+L3C0i{rB%GX+A&~qw1X;w^l)CdL zuXB~Nt%mfPd?&JT^HjG^A?)2_%Qpva7_+wK_S^>BL1Y8#A=$t@HTOj91zAb5E$t+}9x-tqSd{@#LR;*pw#WH=lN>gUgYWPm1 z18lT%wGt~8m0FF0&mnzb3)|Jb-6K)z>p9P?E`B^}5|N@?V*hgA$h%*7E&6`&CZ56jF8g$bM1SsN_`r|8 zYY1DK6!o%kxce^K)#XR`o)s=7X8qK8)0q2PYv;ch>(q33;-2pWiNcp&mp-Tn)+dw~ zY)@?}dwP5J=9m2g`p;ypIKi2B&G6Be)p5smSAX+(NX4YZv*+8ta`JURfMjcJEH1g>XQS?}BYPRWP?99p~do0W*Gx?EZ1aye4Yfy_rvUN%|JSNSrQc`Zw z6FKlGS*hTAkZus&!qq+|HYPDHF+va*C+I|y{*Y=Q%Zv0F<#zMcfZ?0DSg-54ymgNQ z41qB>?=L5dAc8+Dp9GBOX3x!R6w!ATX(#`oL(jY%wxDEl z>b{N#=T#PkFgK0DOPkirvYp5LzWDO%^n3T_4nBP4;F7~k>rqvkPL9|d*qHiisboar z^?TaE*VMB`-VBY}VNE0d)~F+8*SE-P8&=`HCT~9f{=DZuoXFVneB#t$7siar-}lAS z1LKDFod5g5W#2gUe{ijQ(Sa)u<~>|JZ{n#J;Uib=e)-Db4mNK0!8*?Ey0V3LzMWWn zF>}L`jZZIm`DZD<8fG6I|Cn`hU&4VO>O}T;?nd_x={{lao%>ba1RZ&v^0Cy*%kZ^D z;>elH5BuLa=U$d!;}ia?Yf)fLGTC5R1u|bRD(kUjcH$iBPkP!WyZW(zNj3o)i^?Wp zE!V|bEkFXkRh6pbGJ}#3RvXH-I+dZeCGU}B0vR7e3P?W6dvR1X4p#r^dHHW;z}pUr zg4(imyRS9bU^#Q9E`L^3ekc9xkv+ZgmwYwk{YueyClWvXb|L5ab7jMI&*PJu(q~!U zQcgL{EBtWV2*0To!#V z?)zlXxho&pX4RY?oq5V>)81Q4fBR@qq{Vv!_kJ7RcUGY90MRucH;y*pNAK8CgU019 z7*r}BI^`63ucS*aHjl0j`rN2n6l`M z!`gtb8(+;+P2f&BXqf)Xyv($RlI7bSNmX6*@nE7-D?4%7>eNp%ZBo%anV*Saf>+qQ<*lJEhN*uV14YXSpwK!&x4#|fcw$|D)r*B5EPQcLnf^#_{`yN6Mbm~o 
zyDiRrTw{?xdhc1=-zVKmwK--#KYS89U@#|T z<#go_+*_#u!Q*`+2mX{WPB<>y!|{x)%hd9L;`^sFznD8J z$tjI>?U?8}bWHr=yj=sYME# z=GL!(X@?GE8z$e*e2W4kX{p)r9Z2)5?qqaJM;4#UBA!3#9lwH;RJ``ifo!|Y$Lu#& z8eR)J{QC6SV{zlzo3VLhs#$kCzJQFG>o&^e&6{cSd`5Y_Yecu1e^?DTHo4!m%a;eH zPCQr8E6-A1=QTv9IW7NfRbcI0)~>-B&(3YFsL+qc6hP;R?{juxCCDVoH|Gb7JSBAwc)bo^Wg4Pe*I4m9qDrQ zV83a7mt3CoWA23D4te~;0fi$vFaG0RpyLPq-6lT>?cx)%>e0gu)4qB{9CP?`!PTR$ zAD{Una+kqr_XfA*-}hWQy{uvANW~c61U2u-j|rFZtBLc~tnMTbV~z zpX#USdTdP8yav~GWP|H!lX00$oZsL&546p0wKH{{1e(S(aORsk(*|5(X=`+*6WE+K z;Hkj2U=@WYfoP^lf7qMN#NV*}sQm@yRJ(&hWWAGA1Sgtk63N;-X^ukgQP*{PEoSlMCyQk@biFk`WU|cBJ+Z zW>00A?H>xN!Ca&z#45R#A3_FEsD9FN8GIy0Dv=N(NnR3P&?zl4O^})x8AHa$^MlDi zY7qLikc3aQMM?~^$|^!aJ?b^L!I2BrpijW=`(n%P8@H6LOaJ1JI}h)ngA2>H3)1)h z5+l%>FHT1dtBllQHORrU+Xg@ zY;?$P^C$mutFKr0S+;#*XU`IsH>QgMQYYbwhGqjk52sV1M$jg6Oj3 z@z?4jEypyU2)Z&&vb96NqGhwh*6;OLHS6YkpLG6Y-L|pp(YgVr7wgjko;Ui}CHj{* zhJ_wZPg?$KaeUgurh7Amo+qWR`@FngcYKW3oC}NU&rP3ssqw|iEk`E4|Ki}_S&>7& z!LPf^4@XUvTlLO==5*~s|9J7Hxv|z0_D(L$zyImr@1|Y2_vppzDE5rV?O&7`7quOL zMLf-xu+LAf`)R<$;$K!<^bWBR*xbs~p2%sv5|t;tH=h6TqqO1gXLQ&TTi$1MW#RvH zB?|lpyw_?!%>Un7iN1q&+r9Q%4mPacwgI2?;Wy>$&ez{LKj7u7W!GP<-5)dm%)@<@yKl8y`(%Y`>YZ z?S4b#?Q`>2F8aXFLbv{$aOjNu`f0yjI^C_%KPu=0_j4ackDTyyy2aI+kyxjp+?@;6 z596ly;ivC8zH1WOf6l`p>&VC>Dg6@8&0YS~FMmb%n_>L=Q4_WgoDDYMgF*7^ZQOv{ z)Q3~=*Z4Qsfb(PcvGH+n#tk^A3Sjl0oSOeurdr}>zoo``d=hc@mFu)8FX~iND-2Sn zQ(v*MhS4wYyB!>y3p@CH=6Be}b<+DXviE8--SPK< zV?UIp*`3MV*vWcr>cuI`PwUCzkcI^!`=fWBWY!g8LgJmxsdGP$hoN$Gax=jkKwAMMekxiRCw>aQOT3up5moXfNPWc^s{xLZSg zSKa>NP?uk#)4yB%_<~~VLVO2dGiB(ZS6jCKklwGq$1L0;pPOsUR5yV|e*V+#_ghm_ z`}Vul_IobcenU0?jdtKxH^=t0|9S6~>D}rFGrP`SFfTy6_SZ!RpYXpwV)65m<`>14 z-sIYydrE&8bot3I4~A~t_w}6MYtOgbtB@93NAzq+cX_!L_xFsO9#quBa6i>)|H^H3d+Q!1jOf$N_u&_{-9HgyD?2%QK8dcaSN^nQUB&f% zho2r8hG~0{=|S8sNA5Ad`h0Tlur2GGSXZmQjOpI7sc>a<^ZFZ;VmFrzm~p;1<@WD~ zzbO59!s|gLbr<(^xBVpK=fu0--3QBO4-cr^HEY5AE-&gvd_L&op^GYyU!C>b?^UO> zQy=gBiv4-x!mV?BRMztU_C}r2fanHp>$?-`)G;*@ka*hx{II9F22Ez3eb9T$;G1@45Z< zg%ey zfpaN&VzVKp$7T8OCtr+@?yxUlW$$$>E~IyvRxtR?yyJTcucKge+}?EXX6{S- zZT<3Z&*pe{ir#XhmqpZl`?&>i=kD8d^gXq9WE{u(yL0j9PYjrR$uKtCWnKew71_X; zj&|V9^Zqlp74w7HKs_5viwFlM!v{pZJ0{=yhYzQ4^q}87e&2j!)s`dDxQEZ@)?Xs) z&o8V$OV*$IXB(g+8BC#aG-;|T_}3qKQpy5?bWvg?nHDFCj1`fI{0K6PvH-WW1qccC zr%$;uJ^Y)KG__V1}2wl8lRwi|u1-NTeIc8j!tAds-IG zj|QoaO%s4q1si-Z7~V%2AiZ0mvJZ1+gTlIOBXYuJrxHIJBQ zR&FCQS}5lI=Cb~i$l)%d=iqZ+ts%!36xtui+^-JF9W3p${sTMB#RSE!Jx*te?gh)L zRC~YrGYwHelO1}x?OcC!Q`H^U;{=ViLsS%; z?k*kU;4yi>Uw)W|6Pgz3X@s>ADxs@M-b)kvl9#=_G-<(uKZZRWD{eB!5BaGAjtY20 z=5fkF9rDA`GJ!)2NI7-0#RA>1RH)E&_r5eCO|s2$j_#i(r_J|%-uLI{-uHUH_nf9q z9NcnZaNg(4>1ChnSXa2GXYHzqb2oQPimYeLyDI*0VYj=pc=3X5_xC^f`NNO*?Ja1p zxWDGzofC!JA8vn1TygZ^KUyz#if3NU+qb)``|Z`o`<~%@`_JAw{hk5)$u?@q1bp!B zHPzbQQ*wXLE0^|dddTs1&*tXhm%FA{8n&1t;TCrMgGZm3x25|nX3v`qx3(2{zPY7k z$Fi}1YFxbIxuqu0u{~Z6EH-)fE+K24CA2jQZDSqHt@TYS*!gqlw##Qud_A^)TI?Qg zDSO_uh0{+xx5T z5bmy;^dWJjr^;=poVu%F!Rf7sr#Ef2+!V9e5`Z()dTzcmQ`+?vuMc^Yt-An493oKXRmy-{n!e=`NX`) z>QjyN9iOdbel-8rU6mClSDk2iET+9Yc;vZ1Z+NF|O8aiAzTjN_D#5IniDSW z`)Y6fD}yCnW}I2I_0_K{k6Rl;e?0KUmx~$;JZs+k-Of7)H}LPD>-f#3g;T!ZUUOUy z;HxjbTXSG$u)q^}sB5672IhYOCw#!ll^kuR5Rp)O92r`&n=6m>E5-;r?AU9WQLOto$*y@tEoK!G)ic zc0F>YV9X6OD)ZWNRD6I7$rA7ME28LnGDk(}01gz%0dNP<>GTxDy6{6u~bhJHw$UinGLF!AKIgw9ahq#nXiu8HgM3J5)^q*wteKngGV zYrG6`0fFpqbRE5s-~aqZmBiyC9fh+vONvb*S6pEv)k}h<1codVa{M!uBa{OgeWtSs!35m0U`!Akf*mFxu+i8E$%jw-SEOw*!4r@NY$ zfs3#51@hJZd~dfJ__%!aT*ijmf(5+S_eh<@m(@LLch^=Oe3d33tOu$-?}gCW1c6u*Ix zR!L0j7X28!fDu{>Lyn+qoGpo5*v(63o6imZgT850z*X zoPnx*byUOg9%opP)n*H@^HCWTMrFzAR(P@VAsJXjcNqCq%CVNgdFJDZ3qEsPP&{MT 
Date: Fri, 26 May 2023 15:01:43 -0500
Subject: [PATCH 245/686] fix checkstyle

---
 .../src/main/java/org/dspace/app/itemimport/ItemImport.java  | 2 +-
 .../test/java/org/dspace/app/itemimport/ItemImportCLIIT.java | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java
index bcf7afed38b5..c94e163243c1 100644
--- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java
+++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java
@@ -78,7 +78,7 @@ public class ItemImport extends DSpaceRunnable {
     protected boolean zip = false;
     protected boolean remoteUrl = false;
     protected String zipfilename = null;
-    protected boolean zipvalid= false;
+    protected boolean zipvalid = false;
     protected boolean help = false;
     protected File workDir = null;
     protected File workFile = null;
diff --git a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java
index 02a0a8aee048..08ae3af4ae06 100644
--- a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java
+++ b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java
@@ -269,7 +269,8 @@ public void importItemByZipSafInvalidMimetype() throws Exception {
                 Path.of(tempDir.toString() + "/" + PDF_NAME));
         String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(),
-                "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString() + "/mapfile.out" };
+                "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString()
+                        + "/mapfile.out" };
         try {
             perfomImportScript(args);
         } catch (Exception e) {

From 43ab705568b136651d9086119a11001c73cc08e9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Paulo=20Gra=C3=A7a?=
Date: Fri, 26 May 2023 22:47:02 +0100
Subject: [PATCH 246/686] fixing code style errors

---
 .../plugins/AccessStatusElementItemCompilePlugin.java | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java
index 65ec251b21ce..a854b4b42d38 100644
--- a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java
+++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java
@@ -9,18 +9,19 @@

 import java.sql.SQLException;
 import java.util.List;
+
+import com.lyncode.xoai.dataprovider.xml.xoai.Element;
+import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;
 import org.dspace.access.status.factory.AccessStatusServiceFactory;
 import org.dspace.access.status.service.AccessStatusService;
 import org.dspace.content.Item;
 import org.dspace.core.Context;
 import org.dspace.xoai.app.XOAIExtensionItemCompilePlugin;
 import org.dspace.xoai.util.ItemUtils;
-import com.lyncode.xoai.dataprovider.xml.xoai.Element;
-import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;

 /**
  * AccessStatusElementItemCompilePlugin aims to add structured information about the
- * Access Status of the item (if any). 
+ * Access Status of the item (if any).
  * The xoai document will be enriched with a structure like that
 *
@@ -32,7 +33,7 @@
  *   ;
  * }
  * 
- * Returning Values are based on: 
+ * Returning Values are based on:
 * @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper
 */
 public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin {

From a9eab4a254b6fa151da0a86dfdf02217afdcf965 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Paulo=20Gra=C3=A7a?=
Date: Sat, 27 May 2023 08:23:46 +0100
Subject: [PATCH 247/686] also add support for access status at bitstream level

---
 .../crosswalks/oai/metadataFormats/oai_openaire.xsl | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl
index 19b1486f4cbd..3a1d75eb56c6 100644
--- a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl
+++ b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl
@@ -75,6 +75,9 @@
+
+
@@ -93,9 +96,6 @@
-
-
@@ -1162,11 +1162,11 @@
-
+
+            select="/doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']/text()"/>

From c11679c6defd3d68496006fdbb3c01869ef5a3ca Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Paulo=20Gra=C3=A7a?=
Date: Sat, 27 May 2023 09:19:15 +0100
Subject: [PATCH 248/686] removing tailing semicolon

---
 .../app/plugins/AccessStatusElementItemCompilePlugin.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java
index a854b4b42d38..6b3c5ded9882 100644
--- a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java
+++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java
@@ -29,8 +29,8 @@
 *
 *
 *   open.access
 *   ;
- *   ;
+ *
+ *
 * }
 *
 * Returning Values are based on:
 * @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper
 */
 public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin {

From d0d67768100f3c3adc7134c0b2af7d02c2fae4a5 Mon Sep 17 00:00:00 2001
From: Nikita Krivonosov
Date: Mon, 29 May 2023 09:27:56 +0200
Subject: [PATCH 249/686] [CST-5728] LYRASIS [Donated]: DSpace7: Implement signposting pattern in DSpace (REST)

---
 .../org/dspace/util/FrontendUrlService.java | 23 +-
 .../controller/LinksetRestController.java | 119 ++---
 .../LinksetHttpMessageConverter.java | 19 +-
 ...nverter.java => LinksetNodeConverter.java} | 12 +-
 .../app/rest/signposting/model/Linkset.java | 54 +-
 .../model/{Lset.java => LinksetNode.java} | 12 +-
 .../{Relation.java => LinksetRelation.java} | 4 +-
 .../model/LinksetRelationType.java | 35 ++
 .../rest/signposting/model/LinksetRest.java | 14 +-
 .../rest/signposting/model/TypedLinkRest.java | 31 +-
 .../AbstractSignPostingProcessor.java} | 12 +-
 .../BitstreamSignPostingProcessor.java | 28 --
 .../processor/ItemSignPostingProcessor.java | 30 --
 .../processor/SignPostingProcessor.java | 17 +-
 .../bitstream/BitstreamLinksetProcessor.java | 66 +++
 .../BitstreamParentItemProcessor.java | 56 +++
 .../BitstreamSignpostingProcessor.java | 33 ++
 .../bitstream/BitstreamTypeProcessor.java | 60 +++
 .../processor/item/ItemAuthorProcessor.java | 109 ++++
 .../item/ItemContentBitstreamsProcessor.java | 61 +++
 .../item/ItemIdentifierProcessor.java | 67 +++
 .../processor/item/ItemLicenseProcessor.java | 51 ++
 .../item/ItemSignpostingProcessor.java | 34 ++
 .../processor/item/ItemTypeProcessor.java | 61 +++
 .../relation/BitstreamLicenseProcessor.java | 74 ---
 .../relation/BitstreamLinksetProcessor.java | 85 ----
 .../BitstreamPublicationBundaryProcessor.java | 76 ---
 .../relation/BitstreamTypeProcessor.java | 74 ---
 .../relation/ItemAuthorProcessor.java | 109 ----
 .../relation/ItemIdentifierProcessor.java | 89 ----
 .../relation/ItemLandingPageProcessor.java | 59 ---
 .../relation/ItemLicenseProcessor.java | 79 ---
 .../ItemPublicationBundaryProcessor.java | 87 ----
 .../relation/ItemTypeProcessor.java | 79 ---
 .../rest/signposting/utils/LinksetMapper.java | 54 ++
 .../controller/LinksetRestControllerIT.java | 469 +++++++++++++++---
 ...verter-dspace-to-schema-org-uri.properties | 23 +
 dspace/config/spring/rest/signposting.xml | 58 +--
 38 files changed, 1266 insertions(+), 1157 deletions(-)
 rename dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/{LsetConverter.java => LinksetNodeConverter.java} (70%)
 rename dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/{Lset.java => LinksetNode.java} (78%)
 rename .../model/{Relation.java => LinksetRelation.java} (89%)
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java
 rename .../{relation/ASignPostingProcessor.java => processor/AbstractSignPostingProcessor.java} (74%)
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/BitstreamSignPostingProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/ItemSignPostingProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamSignpostingProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemSignpostingProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLicenseProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLinksetProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamPublicationBundaryProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamTypeProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemAuthorProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemIdentifierProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLandingPageProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLicenseProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemPublicationBundaryProcessor.java
 delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemTypeProcessor.java
 create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java
 create mode 100644 dspace/config/crosswalks/signposting/mapConverter-dspace-to-schema-org-uri.properties

diff --git a/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java
index 5e15c9c50a9e..a50baf910e77 100644
--- a/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java
+++ b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java
@@ -14,7 +14,7 @@
 import java.util.List;
 import java.util.Optional;

-import org.apache.logging.log4j.Logger;
+import org.dspace.content.Bitstream;
 import org.dspace.content.Item;
 import org.dspace.core.Context;
 import org.dspace.discovery.DiscoverQuery;
@@ -22,6 +22,8 @@
 import org.dspace.discovery.SearchService;
 import org.dspace.discovery.SearchServiceException;
 import org.dspace.services.ConfigurationService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;

@@ -31,7 +33,7 @@
 @Component
 public class FrontendUrlService {

-    private static final Logger log = 
org.apache.logging.log4j.LogManager.getLogger(FrontendUrlService.class); + private static final Logger log = LoggerFactory.getLogger(FrontendUrlService.class); @Autowired private ConfigurationService configurationService; @@ -42,16 +44,27 @@ public class FrontendUrlService { /** * Generates front-end url for specified item. * - * @param item item + * @param context context + * @param item item * @return front-end url */ - public String generateUrl(Item item) { + public String generateUrl(Context context, Item item) { String uiURL = configurationService.getProperty("dspace.ui.url"); - Context context = new Context(Context.Mode.READ_ONLY); return generateUrlWithSearchService(item, uiURL, context) .orElseGet(() -> uiURL + "/items/" + item.getID()); } + /** + * Generates front-end url for specified bitstream. + * + * @param bitstream bitstream + * @return front-end url + */ + public String generateUrl(Bitstream bitstream) { + String uiURL = configurationService.getProperty("dspace.ui.url"); + return uiURL + "/bitstreams/" + bitstream.getID() + "/download"; + } + private Optional generateUrlWithSearchService(Item item, String uiURLStem, Context context) { DiscoverQuery entityQuery = new DiscoverQuery(); entityQuery.setQuery("search.uniqueid:\"Item-" + item.getID() + "\" and entityType:*"); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java index 9197e2cdf407..e828820c0c5f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -14,17 +14,15 @@ import java.util.List; import java.util.UUID; import java.util.stream.Collectors; -import java.util.stream.Stream; import javax.servlet.http.HttpServletRequest; import org.dspace.app.rest.converter.ConverterService; -import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRest; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; import org.dspace.app.rest.signposting.model.TypedLinkRest; -import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.bitstream.BitstreamSignpostingProcessor; +import org.dspace.app.rest.signposting.processor.item.ItemSignpostingProcessor; +import org.dspace.app.rest.signposting.utils.LinksetMapper; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.rest.utils.Utils; import org.dspace.content.Bitstream; @@ -34,8 +32,6 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.handle.service.HandleService; -import org.dspace.services.ConfigurationService; import org.dspace.utils.DSpace; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; @@ -66,10 +62,6 @@ public class LinksetRestController { @Autowired private ItemService itemService; @Autowired - private HandleService handleService; - @Autowired - private ConfigurationService configurationService; - @Autowired private ConverterService converter; 
@PreAuthorize("permitAll()") @@ -91,35 +83,20 @@ public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) try { Context context = ContextUtil.obtainContext(request); - DSpaceObject dso = null; - dso = itemService.find(context, uuid); + Item dso = itemService.find(context, uuid); if (dso == null) { throw new ResourceNotFoundException("No such Item: " + uuid); } - List linksets = new ArrayList<>(); - Linkset primaryLinkset = new Linkset(); - linksets.add(primaryLinkset); - + List linksetNodes = new ArrayList<>(); if (dso.getType() == Constants.ITEM) { - primaryLinkset.setAnchor(handleService.resolveToURL( - context, dso.getHandle())); - List ispp = new DSpace().getServiceManager() - .getServicesByType(ItemSignPostingProcessor.class); - for (ItemSignPostingProcessor sp : ispp) { - sp.buildRelation(context, request, (Item) dso, linksets, primaryLinkset); - } - } - - LinksetRest linksetRest = null; - for (Linkset linkset : linksets) { - if (linksetRest == null) { - linksetRest = converter.toRest(linkset, utils.obtainProjection()); - } else { - linksetRest.getLinkset().add(linkset); + List ispp = new DSpace().getServiceManager() + .getServicesByType(ItemSignpostingProcessor.class); + for (ItemSignpostingProcessor sp : ispp) { + sp.addLinkSetNodes(context, request, dso, linksetNodes); } } - return linksetRest; + return converter.toRest(LinksetMapper.map(linksetNodes), utils.obtainProjection()); } catch (SQLException e) { throw new RuntimeException(e); } @@ -132,27 +109,24 @@ public LinksetRest getLset(HttpServletRequest request, @PathVariable UUID uuid) try { Context context = ContextUtil.obtainContext(request); - DSpaceObject dso = null; - dso = itemService.find(context, uuid); - if (dso == null) { + Item item = itemService.find(context, uuid); + if (item == null) { throw new ResourceNotFoundException("No such Item: " + uuid); } - List lsets = new ArrayList<>(); - if (dso.getType() == Constants.ITEM) { - List ispp = new DSpace().getServiceManager() - .getServicesByType(ItemSignPostingProcessor.class); - for (ItemSignPostingProcessor sp : ispp) { - sp.buildLset(context, request, (Item) dso, lsets); - } + List linksetNodes = new ArrayList<>(); + List ispp = new DSpace().getServiceManager() + .getServicesByType(ItemSignpostingProcessor.class); + for (ItemSignpostingProcessor sp : ispp) { + sp.addLinkSetNodes(context, request, item, linksetNodes); } LinksetRest linksetRest = null; - for (Lset lset : lsets) { + for (LinksetNode linksetNode : linksetNodes) { if (linksetRest == null) { - linksetRest = converter.toRest(lset, utils.obtainProjection()); + linksetRest = converter.toRest(linksetNode, utils.obtainProjection()); } else { - linksetRest.getLset().add(lset); + linksetRest.getLinksetNodes().add(linksetNode); } } return linksetRest; @@ -167,8 +141,7 @@ public List getHeader(HttpServletRequest request, @PathVariable U try { Context context = ContextUtil.obtainContext(request); - DSpaceObject dso = null; - dso = bitstreamService.find(context, uuid); + DSpaceObject dso = bitstreamService.find(context, uuid); if (dso == null) { dso = itemService.find(context, uuid); if (dso == null) { @@ -176,52 +149,26 @@ public List getHeader(HttpServletRequest request, @PathVariable U } } - List linksets = new ArrayList<>(); - Linkset primaryLinkset = new Linkset(); - linksets.add(primaryLinkset); - + List linksetNodes = new ArrayList<>(); if (dso.getType() == Constants.ITEM) { - primaryLinkset.setAnchor(handleService.resolveToURL( - context, dso.getHandle())); - List ispp = new 
DSpace().getServiceManager() - .getServicesByType(ItemSignPostingProcessor.class); - for (ItemSignPostingProcessor sp : ispp) { - sp.buildRelation(context, request, (Item) dso, linksets, primaryLinkset); + List ispp = new DSpace().getServiceManager() + .getServicesByType(ItemSignpostingProcessor.class); + for (ItemSignpostingProcessor sp : ispp) { + sp.addLinkSetNodes(context, request, (Item) dso, linksetNodes); } } else { - List bspp = new DSpace().getServiceManager() - .getServicesByType(BitstreamSignPostingProcessor.class); - for (BitstreamSignPostingProcessor sp : bspp) { - sp.buildRelation(context, request, (Bitstream) dso, linksets, primaryLinkset); + List bspp = new DSpace().getServiceManager() + .getServicesByType(BitstreamSignpostingProcessor.class); + for (BitstreamSignpostingProcessor sp : bspp) { + sp.addLinkSetNodes(context, request, (Bitstream) dso, linksetNodes); } - String url = configurationService.getProperty("dspace.ui.url"); - primaryLinkset.setAnchor(url + "/bitstreams/" + dso.getID() + "/download"); } - return linksets.stream() - .flatMap(linkset -> mapTypedLinks(linkset).stream()) + return linksetNodes.stream() + .map(node -> new TypedLinkRest(node.getLink(), node.getRelation(), node.getType())) .collect(Collectors.toList()); } catch (SQLException e) { throw new RuntimeException(e); } } - - private static List mapTypedLinks(Linkset linkset) { - return Stream.of( - mapTypedLinks(TypedLinkRest.Relation.LANDING_PAGE, linkset.getLandingPage()), - mapTypedLinks(TypedLinkRest.Relation.ITEM, linkset.getItem()), - mapTypedLinks(TypedLinkRest.Relation.CITE_AS, linkset.getCiteAs()), - mapTypedLinks(TypedLinkRest.Relation.AUTHOR, linkset.getAuthor()), - mapTypedLinks(TypedLinkRest.Relation.TYPE, linkset.getType()), - mapTypedLinks(TypedLinkRest.Relation.LICENSE, linkset.getLicense()), - mapTypedLinks(TypedLinkRest.Relation.COLLECTION, linkset.getCollection()), - mapTypedLinks(TypedLinkRest.Relation.LINKSET, linkset.getLinkset()) - ).flatMap(List::stream).collect(Collectors.toList()); - } - - private static List mapTypedLinks(TypedLinkRest.Relation relationType, List relations) { - return relations.stream() - .map(relation -> new TypedLinkRest(relation.getHref(), relationType, relation.getType())) - .collect(Collectors.toList()); - } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java index 0cc6628c3bc8..e5c187c9554b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.signposting.converter; import static java.lang.String.format; +import static java.util.Objects.nonNull; import static org.apache.commons.lang.StringUtils.isNotBlank; import java.io.IOException; @@ -35,18 +36,18 @@ public LinksetHttpMessageConverter() { protected void writeInternal(LinksetRest linksetRest, Type type, HttpOutputMessage outputMessage) throws IOException, HttpMessageNotWritableException { StringBuilder responseBody = new StringBuilder(); - linksetRest.getLset().forEach(lset -> { - if (isNotBlank(lset.getLink())) { - responseBody.append(format("<%s> ", lset.getLink())); + linksetRest.getLinksetNodes().forEach(linksetNodes -> { + if (isNotBlank(linksetNodes.getLink())) { + 
responseBody.append(format("<%s> ", linksetNodes.getLink())); } - if (isNotBlank(lset.getRelation())) { - responseBody.append(format("; rel=\"%s\" ", lset.getRelation())); + if (nonNull(linksetNodes.getRelation())) { + responseBody.append(format("; rel=\"%s\" ", linksetNodes.getRelation().getName())); } - if (isNotBlank(lset.getType())) { - responseBody.append(format("; type=\"%s\" ", lset.getType())); + if (isNotBlank(linksetNodes.getType())) { + responseBody.append(format("; type=\"%s\" ", linksetNodes.getType())); } - if (isNotBlank(lset.getAnchor())) { - responseBody.append(format("; anchor=\"%s\" ", lset.getAnchor())); + if (isNotBlank(linksetNodes.getAnchor())) { + responseBody.append(format("; anchor=\"%s\" ", linksetNodes.getAnchor())); } responseBody.append(", "); }); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LsetConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetNodeConverter.java similarity index 70% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LsetConverter.java rename to dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetNodeConverter.java index 1e2e3884a434..e73aa260ad3e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LsetConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetNodeConverter.java @@ -9,8 +9,8 @@ import org.dspace.app.rest.converter.DSpaceConverter; import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRest; -import org.dspace.app.rest.signposting.model.Lset; import org.springframework.stereotype.Component; @@ -20,18 +20,18 @@ * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) */ @Component -public class LsetConverter implements DSpaceConverter { +public class LinksetNodeConverter implements DSpaceConverter { @Override - public LinksetRest convert(Lset lset, Projection projection) { + public LinksetRest convert(LinksetNode linkSetNode, Projection projection) { LinksetRest linksetRest = new LinksetRest(); linksetRest.setProjection(projection); - linksetRest.getLset().add(lset); + linksetRest.getLinksetNodes().add(linkSetNode); return linksetRest; } @Override - public Class getModelClass() { - return Lset.class; + public Class getModelClass() { + return LinksetNode.class; } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java index f97d7a5df39c..c70e4916f69c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java @@ -19,102 +19,90 @@ public class Linkset { @JsonInclude(JsonInclude.Include.NON_EMPTY) - private List author; + private List author; @JsonProperty("cite-as") @JsonInclude(JsonInclude.Include.NON_EMPTY) - private List citeAs; + private List citeAs; @JsonInclude(JsonInclude.Include.NON_EMPTY) - private List item; + private List item; @JsonInclude(JsonInclude.Include.NON_EMPTY) - private List collection; + private List collection; @JsonInclude(JsonInclude.Include.NON_EMPTY) - private List landingPage; + private List type; @JsonInclude(JsonInclude.Include.NON_EMPTY) - private List type; + private 
List license; @JsonInclude(JsonInclude.Include.NON_EMPTY) - private List license; - @JsonInclude(JsonInclude.Include.NON_EMPTY) - private List linkset; + private List linkset; @JsonInclude(JsonInclude.Include.NON_NULL) private String anchor; - public List getAuthor() { + public List getAuthor() { if (this.author == null) { this.author = new ArrayList<>(); } return author; } - public void setAuthor(List author) { + public void setAuthor(List author) { this.author = author; } - public List getCiteAs() { + public List getCiteAs() { if (this.citeAs == null) { this.citeAs = new ArrayList<>(); } return citeAs; } - public void setCiteAs(List citeAs) { + public void setCiteAs(List citeAs) { this.citeAs = citeAs; } - public List getItem() { + public List getItem() { if (this.item == null) { this.item = new ArrayList<>(); } return item; } - public void setItem(List item) { + public void setItem(List item) { this.item = item; } - public List getCollection() { + public List getCollection() { if (this.collection == null) { this.collection = new ArrayList<>(); } return collection; } - public void setCollection(List collection) { + public void setCollection(List collection) { this.collection = collection; } - public List getLandingPage() { - if (landingPage == null) { - landingPage = new ArrayList<>(); - } - return landingPage; - } - public void setLandingPage(List landingPage) { - this.landingPage = landingPage; - } - - public List getType() { + public List getType() { if (type == null) { type = new ArrayList<>(); } return type; } - public void setType(List type) { + public void setType(List type) { this.type = type; } - public List getLicense() { + public List getLicense() { if (license == null) { license = new ArrayList<>(); } return license; } - public void setLicense(List license) { + public void setLicense(List license) { this.license = license; } - public List getLinkset() { + public List getLinkset() { if (linkset == null) { linkset = new ArrayList<>(); } return linkset; } - public void setLinkset(List linkset) { + public void setLinkset(List linkset) { this.linkset = linkset; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Lset.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetNode.java similarity index 78% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Lset.java rename to dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetNode.java index dbfabfcb00ae..8c7347350faa 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Lset.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetNode.java @@ -12,23 +12,23 @@ /** * DTO object represents a node of a link set. 
*/ -public class Lset { +public class LinksetNode { @JsonInclude(JsonInclude.Include.NON_NULL) private String link; @JsonInclude(JsonInclude.Include.NON_NULL) - private String relation; + private LinksetRelationType relation; @JsonInclude(JsonInclude.Include.NON_NULL) private String type; @JsonInclude(JsonInclude.Include.NON_NULL) private String anchor; - public Lset(String link, String relation, String type, String anchor) { + public LinksetNode(String link, LinksetRelationType relation, String type, String anchor) { this(link, relation, anchor); this.type = type; } - public Lset(String link, String relation, String anchor) { + public LinksetNode(String link, LinksetRelationType relation, String anchor) { this.link = link; this.relation = relation; this.anchor = anchor; @@ -42,11 +42,11 @@ public void setLink(String link) { this.link = link; } - public String getRelation() { + public LinksetRelationType getRelation() { return relation; } - public void setRelation(String relation) { + public void setRelation(LinksetRelationType relation) { this.relation = relation; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Relation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelation.java similarity index 89% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Relation.java rename to dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelation.java index 9b3139fe1ee6..ecbb786079d0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Relation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelation.java @@ -12,14 +12,14 @@ /** * DTO object represents a relation to specific resource. */ -public class Relation { +public class LinksetRelation { @JsonInclude(JsonInclude.Include.NON_NULL) private String href; @JsonInclude(JsonInclude.Include.NON_NULL) private String type; - public Relation(String href, String type) { + public LinksetRelation(String href, String type) { this.href = href; this.type = type; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java new file mode 100644 index 000000000000..7a513b5d998a --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * An enumeration that holds track of linkset relation types. 
+ */ +public enum LinksetRelationType { + + ITEM("item"), + CITE_AS("cite-as"), + AUTHOR("author"), + TYPE("type"), + LICENSE("license"), + COLLECTION("collection"), + LINKSET("linkset"); + + private final String name; + + LinksetRelationType(String name) { + this.name = name; + } + + @JsonValue + public String getName() { + return name; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java index 6558d75d5851..437e486ad097 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java @@ -33,7 +33,7 @@ public class LinksetRest extends RestAddressableModel { @JsonInclude(Include.NON_EMPTY) private List linkset; @JsonInclude(Include.NON_EMPTY) - private List lset; + private List linkSetNode; public List getLinkset() { if (this.linkset == null) { @@ -45,14 +45,14 @@ public void setLinkset(List linkset) { this.linkset = linkset; } - public List getLset() { - if (this.lset == null) { - this.lset = new ArrayList<>(); + public List getLinksetNodes() { + if (this.linkSetNode == null) { + this.linkSetNode = new ArrayList<>(); } - return lset; + return linkSetNode; } - public void setLset(List lset) { - this.lset = lset; + public void setLset(List linkSetNode) { + this.linkSetNode = linkSetNode; } @JsonIgnore diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java index 2a6bcc4a012f..3ba09bf1094c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java @@ -8,7 +8,6 @@ package org.dspace.app.rest.signposting.model; import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonValue; import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.model.LinksRest; import org.dspace.app.rest.model.RestAddressableModel; @@ -25,14 +24,14 @@ public class TypedLinkRest extends RestAddressableModel { private String href; - private Relation rel; + private LinksetRelationType rel; private String type; public TypedLinkRest() { } - public TypedLinkRest(String href, Relation rel, String type) { + public TypedLinkRest(String href, LinksetRelationType rel, String type) { this.href = href; this.rel = rel; this.type = type; @@ -46,11 +45,11 @@ public void setHref(String href) { this.href = href; } - public Relation getRel() { + public LinksetRelationType getRel() { return rel; } - public void setRel(Relation rel) { + public void setRel(LinksetRelationType rel) { this.rel = rel; } @@ -72,26 +71,4 @@ public String getCategory() { public Class getController() { return RestResourceController.class; } - - public enum Relation { - LANDING_PAGE("landing page"), - ITEM("item"), - CITE_AS("cite-as"), - AUTHOR("author"), - TYPE("type"), - LICENSE("license"), - COLLECTION("collection"), - LINKSET("linkset"); - - private final String name; - - Relation(String name) { - this.name = name; - } - - @JsonValue - public String getName() { - return name; - } - } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ASignPostingProcessor.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/AbstractSignPostingProcessor.java similarity index 74% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ASignPostingProcessor.java rename to dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/AbstractSignPostingProcessor.java index 22ae0a72b704..32368a57d595 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ASignPostingProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/AbstractSignPostingProcessor.java @@ -5,7 +5,9 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.signposting.relation; +package org.dspace.app.rest.signposting.processor; + +import org.dspace.app.rest.signposting.model.LinksetRelationType; /** * An abstract class of generic signposting relation. @@ -13,11 +15,11 @@ * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) */ -public abstract class ASignPostingProcessor { +public abstract class AbstractSignPostingProcessor { private String metadataField; - private String relation; + private LinksetRelationType relation; private String pattern; @@ -29,11 +31,11 @@ public void setMetadataField(String metadataField) { this.metadataField = metadataField; } - public String getRelation() { + public LinksetRelationType getRelation() { return relation; } - public void setRelation(String relation) { + public void setRelation(LinksetRelationType relation) { this.relation = relation; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/BitstreamSignPostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/BitstreamSignPostingProcessor.java deleted file mode 100644 index e80c3b4c39c2..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/BitstreamSignPostingProcessor.java +++ /dev/null @@ -1,28 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.processor; - -import org.dspace.content.Bitstream; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; - -/** - * BitstreamSignPostingProcessor interface represents SignPostingProcessor for a bitstream. 
- * - * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) - * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) - */ -public interface BitstreamSignPostingProcessor extends SignPostingProcessor { - - default String buildAnchor(Bitstream bitstream) { - ConfigurationService configurationService = - DSpaceServicesFactory.getInstance().getConfigurationService(); - String url = configurationService.getProperty("dspace.ui.url"); - return url + "/bitstreams/" + bitstream.getID() + "/download"; - } -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/ItemSignPostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/ItemSignPostingProcessor.java deleted file mode 100644 index 2d4b21464cb3..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/ItemSignPostingProcessor.java +++ /dev/null @@ -1,30 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.processor; - -import java.sql.SQLException; - -import org.dspace.content.Item; -import org.dspace.core.Context; -import org.dspace.handle.factory.HandleServiceFactory; -import org.dspace.handle.service.HandleService; - -/** - * ItemSignPostingProcessor interface represents SignPostingProcessor for an item. - * - * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) - * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) - */ -public interface ItemSignPostingProcessor extends SignPostingProcessor { - - default String buildAnchor(Context context, Item item) throws SQLException { - HandleService handleService = - HandleServiceFactory.getInstance().getHandleService(); - return handleService.resolveToURL(context, item.getHandle()); - } -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java index aab13b6626b1..efcfd50ab512 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java @@ -10,8 +10,7 @@ import java.util.List; import javax.servlet.http.HttpServletRequest; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; +import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; @@ -22,9 +21,15 @@ * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) */ public interface SignPostingProcessor { - void buildRelation(Context context, HttpServletRequest request, - T object, List linksets, Linkset primaryLinkset); - void buildLset(Context context, HttpServletRequest request, - T object, List lsets); + /** + * Method for adding new linkset nodes into {@code linksetNodes}. 
+ * + * @param context context + * @param request request + * @param object object + * @param linksetNodes linkset nodes + */ + void addLinkSetNodes(Context context, HttpServletRequest request, + T object, List linksetNodes); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java new file mode 100644 index 000000000000..c65191cb0749 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.bitstream; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link BitstreamSignpostingProcessor} for the linkset relation. + */ +public class BitstreamLinksetProcessor extends BitstreamSignpostingProcessor { + + private static final Logger log = Logger.getLogger(BitstreamLinksetProcessor.class); + + private final BitstreamService bitstreamService; + + private final ConfigurationService configurationService; + + public BitstreamLinksetProcessor(FrontendUrlService frontendUrlService, + BitstreamService bitstreamService, + ConfigurationService configurationService) { + super(frontendUrlService); + this.bitstreamService = bitstreamService; + this.configurationService = configurationService; + setRelation(LinksetRelationType.LINKSET); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Bitstream bitstream, List linksetNodes) { + try { + Item item = (Item) bitstreamService.getParentObject(context, bitstream); + if (item != null) { + String signpostingPath = configurationService.getProperty("signposting.path"); + String baseUrl = configurationService.getProperty("dspace.ui.url"); + + String linksetUrl = String.format("%s/%s/linksets/%s", baseUrl, signpostingPath, item.getID()); + String linksetJsonUrl = linksetUrl + "/json"; + List links = List.of( + new LinksetNode(linksetUrl, getRelation(), "application/linkset", buildAnchor(bitstream)), + new LinksetNode(linksetJsonUrl, getRelation(), "application/linkset+json", + buildAnchor(bitstream)) + ); + linksetNodes.addAll(links); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java new file mode 100644 index 000000000000..815d7817d4cf --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java @@ -0,0 +1,56 @@ +/** + * The contents of this file are subject 
to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.bitstream; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link BitstreamSignpostingProcessor} for the collection relation. + * It links the Bitstream to the parent Item. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class BitstreamParentItemProcessor extends BitstreamSignpostingProcessor { + + private static final Logger log = Logger.getLogger(BitstreamParentItemProcessor.class); + + private final BitstreamService bitstreamService; + + public BitstreamParentItemProcessor(FrontendUrlService frontendUrlService, + BitstreamService bitstreamService) { + super(frontendUrlService); + this.bitstreamService = bitstreamService; + setRelation(LinksetRelationType.COLLECTION); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Bitstream bitstream, List linksetNodes) { + try { + Item item = (Item) bitstreamService.getParentObject(context, bitstream); + if (item != null) { + String itemUiUrl = frontendUrlService.generateUrl(context, item); + linksetNodes.add(new LinksetNode(itemUiUrl, getRelation(), "text/html", buildAnchor(bitstream))); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamSignpostingProcessor.java new file mode 100644 index 000000000000..b0f251edb5ee --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamSignpostingProcessor.java @@ -0,0 +1,33 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.bitstream; + +import org.dspace.app.rest.signposting.processor.AbstractSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.SignPostingProcessor; +import org.dspace.content.Bitstream; +import org.dspace.util.FrontendUrlService; + +/** + * An abstract class represents {@link SignPostingProcessor } for a bitstream. 
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public abstract class BitstreamSignpostingProcessor extends AbstractSignPostingProcessor + implements SignPostingProcessor { + + protected final FrontendUrlService frontendUrlService; + + public BitstreamSignpostingProcessor(FrontendUrlService frontendUrlService) { + this.frontendUrlService = frontendUrlService; + } + + public String buildAnchor(Bitstream bitstream) { + return frontendUrlService.generateUrl(bitstream); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java new file mode 100644 index 000000000000..2f46193ca81d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java @@ -0,0 +1,60 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.bitstream; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; +import org.dspace.util.SimpleMapConverter; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An extension of {@link BitstreamSignpostingProcessor} for the type relation. + * Provides links to a specific type from schema.org. 
+ */ +public class BitstreamTypeProcessor extends BitstreamSignpostingProcessor { + + private static final Logger log = Logger.getLogger(BitstreamTypeProcessor.class); + + @Autowired + private SimpleMapConverter mapConverterDSpaceToSchemaOrgUri; + + @Autowired + private BitstreamService bitstreamService; + + public BitstreamTypeProcessor(FrontendUrlService frontendUrlService) { + super(frontendUrlService); + setRelation(LinksetRelationType.TYPE); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Bitstream bitstream, List linksetNodes) { + try { + String type = bitstreamService.getMetadataFirstValue(bitstream, "dc", "type", null, Item.ANY); + if (StringUtils.isNotBlank(type)) { + String typeSchemeUri = mapConverterDSpaceToSchemaOrgUri.getValue(type); + linksetNodes.add( + new LinksetNode(typeSchemeUri, getRelation(), "text/html", buildAnchor(bitstream)) + ); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java new file mode 100644 index 000000000000..b935e50bfad8 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java @@ -0,0 +1,109 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import static org.dspace.content.Item.ANY; + +import java.text.MessageFormat; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; +import org.dspace.content.service.EntityTypeService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the author relation. 
+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemAuthorProcessor extends ItemSignpostingProcessor { + + private static final String IS_AUTHOR_OF = "isAuthorOf"; + + /** + * log4j category + */ + private static final Logger log = Logger.getLogger(ItemAuthorProcessor.class); + + private final ItemService itemService; + + private final RelationshipService relationshipService; + + private final EntityTypeService entityTypeService; + + private String orcidMetadata; + + public ItemAuthorProcessor(FrontendUrlService frontendUrlService, + ItemService itemService, + RelationshipService relationshipService, + EntityTypeService entityTypeService) { + super(frontendUrlService); + this.itemService = itemService; + this.relationshipService = relationshipService; + this.entityTypeService = entityTypeService; + setRelation(LinksetRelationType.AUTHOR); + } + + public String getOrcidMetadata() { + return orcidMetadata; + } + + public void setOrcidMetadata(String orcidMetadata) { + this.orcidMetadata = orcidMetadata; + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + EntityType personType = entityTypeService.findByEntityType(context, "Author"); + List itemRelationships = relationshipService.findByItem(context, item); + for (Relationship relationship : itemRelationships) { + + RelationshipType relationshipType = relationship.getRelationshipType(); + boolean hasPersonType = relationshipType.getLeftType().equals(personType) + || relationshipType.getRightType().equals(personType); + boolean isAuthor = relationshipType.getLeftwardType().startsWith(IS_AUTHOR_OF) + || relationshipType.getRightwardType().startsWith(IS_AUTHOR_OF); + + if (hasPersonType && isAuthor) { + Item authorItem = relationship.getLeftItem().getID().equals(item.getID()) + ? relationship.getRightItem() + : relationship.getLeftItem(); + + String authorOrcid = itemService.getMetadataFirstValue( + authorItem, new MetadataFieldName(getOrcidMetadata()), ANY + ); + if (StringUtils.isNotBlank(authorOrcid)) { + String authorLink = StringUtils.isBlank(getPattern()) + ? 
authorOrcid + : MessageFormat.format(getPattern(), authorOrcid); + linksetNodes.add( + new LinksetNode(authorLink, getRelation(), "text/html", buildAnchor(context, item)) + ); + } + } + } + } catch (Exception e) { + log.error("Problem to add signposting pattern", e); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java new file mode 100644 index 000000000000..61bf371adbdf --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.sql.SQLException; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the item relation. + * It links item with its content. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemContentBitstreamsProcessor extends ItemSignpostingProcessor { + + /** + * log4j category + */ + private static final Logger log = Logger.getLogger(ItemContentBitstreamsProcessor.class); + + public ItemContentBitstreamsProcessor(FrontendUrlService frontendUrlService) { + super(frontendUrlService); + setRelation(LinksetRelationType.ITEM); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + for (Bundle bundle : item.getBundles(Constants.CONTENT_BUNDLE_NAME)) { + for (Bitstream bitstream : bundle.getBitstreams()) { + String mimeType = bitstream.getFormat(context).getMIMEType(); + String bitstreamUrl = frontendUrlService.generateUrl(bitstream); + linksetNodes.add( + new LinksetNode(bitstreamUrl, getRelation(), mimeType, buildAnchor(context, item)) + ); + } + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java new file mode 100644 index 000000000000..01151fd7d37c --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.text.MessageFormat; +import java.util.List; +import 
javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the identifier relation. + * Identifier metadata can be specified with metadataField in configuration. + * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemIdentifierProcessor extends ItemSignpostingProcessor { + + /** + * log4j category + */ + private static final Logger log = Logger.getLogger(ItemIdentifierProcessor.class); + + private final ItemService itemService; + + public ItemIdentifierProcessor(FrontendUrlService frontendUrlService, ItemService itemService) { + super(frontendUrlService); + this.itemService = itemService; + setRelation(LinksetRelationType.CITE_AS); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + List identifiers = itemService.getMetadataByMetadataString(item, getMetadataField()); + for (MetadataValue identifier : identifiers) { + if (identifier != null) { + String identifierValue = identifier.getValue(); + if (StringUtils.isNotBlank(identifierValue)) { + if (StringUtils.isNotBlank(getPattern())) { + identifierValue = MessageFormat.format(getPattern(), identifierValue); + } + linksetNodes.add(new LinksetNode(identifierValue, getRelation(), buildAnchor(context, item))); + } + } + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java new file mode 100644 index 000000000000..2c356129963e --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.license.factory.LicenseServiceFactory; +import org.dspace.license.service.CreativeCommonsService; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the license relation. 
+ */ +public class ItemLicenseProcessor extends ItemSignpostingProcessor { + + private static final Logger log = Logger.getLogger(ItemLicenseProcessor.class); + + private final CreativeCommonsService creativeCommonsService = + LicenseServiceFactory.getInstance().getCreativeCommonsService(); + + public ItemLicenseProcessor(FrontendUrlService frontendUrlService) { + super(frontendUrlService); + setRelation(LinksetRelationType.LICENSE); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + String licenseUrl = creativeCommonsService.getLicenseURL(context, item); + if (StringUtils.isNotBlank(licenseUrl)) { + linksetNodes.add(new LinksetNode(licenseUrl, getRelation(), "text/html", buildAnchor(context, item))); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemSignpostingProcessor.java new file mode 100644 index 000000000000..2ec26632a7e0 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemSignpostingProcessor.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import org.dspace.app.rest.signposting.processor.AbstractSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.SignPostingProcessor; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An abstract class that represents a {@link SignPostingProcessor} for an item. 

+ * + * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public abstract class ItemSignpostingProcessor extends AbstractSignPostingProcessor + implements SignPostingProcessor { + + protected final FrontendUrlService frontendUrlService; + + public ItemSignpostingProcessor(FrontendUrlService frontendUrlService) { + this.frontendUrlService = frontendUrlService; + } + + public String buildAnchor(Context context, Item item) { + return frontendUrlService.generateUrl(context, item); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java new file mode 100644 index 000000000000..f1f41de055ee --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; +import org.dspace.util.SimpleMapConverter; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An extension of {@link ItemSignpostingProcessor} for the type relation. + * Provides links to a specific type from schema.org. 
+ */ +public class ItemTypeProcessor extends ItemSignpostingProcessor { + + private static final Logger log = Logger.getLogger(ItemTypeProcessor.class); + private static final String ABOUT_PAGE_URI = "https://schema.org/AboutPage"; + + @Autowired + private SimpleMapConverter mapConverterDSpaceToSchemaOrgUri; + + @Autowired + private ItemService itemService; + + public ItemTypeProcessor(FrontendUrlService frontendUrlService) { + super(frontendUrlService); + setRelation(LinksetRelationType.TYPE); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + linksetNodes.add(new LinksetNode(ABOUT_PAGE_URI, getRelation(), "text/html", buildAnchor(context, item))); + String type = itemService.getMetadataFirstValue(item, "dc", "type", null, Item.ANY); + if (StringUtils.isNotBlank(type)) { + String typeSchemeUri = mapConverterDSpaceToSchemaOrgUri.getValue(type); + linksetNodes.add( + new LinksetNode(typeSchemeUri, getRelation(), "text/html", buildAnchor(context, item)) + ); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLicenseProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLicenseProcessor.java deleted file mode 100644 index 99439770417c..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLicenseProcessor.java +++ /dev/null @@ -1,74 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import java.text.MessageFormat; -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Bitstream; -import org.dspace.content.service.BitstreamService; -import org.dspace.core.Context; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation of {@link ItemSignPostingProcessor} for the license relation. 
- */ -public class BitstreamLicenseProcessor extends ASignPostingProcessor - implements BitstreamSignPostingProcessor { - - private static Logger log = Logger.getLogger(BitstreamLicenseProcessor.class); - - @Autowired - private BitstreamService bitstreamService; - - public BitstreamLicenseProcessor() { - setRelation("license"); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Bitstream bitstream, List linksets, - Linkset primaryLinkset) { - try { - String license = bitstreamService.getMetadata(bitstream, getMetadataField()); - if (StringUtils.isNotBlank(license)) { - if (StringUtils.isNotBlank(getPattern())) { - license = MessageFormat.format(getPattern(), license); - } - primaryLinkset.getLicense().add(new Relation(license, null)); - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, - Bitstream bitstream, List lsets) { - try { - String license = bitstreamService.getMetadata(bitstream, getMetadataField()); - if (StringUtils.isNotBlank(license)) { - if (StringUtils.isNotBlank(getPattern())) { - license = MessageFormat.format(getPattern(), license); - } - lsets.add(new Lset(license, getRelation(), buildAnchor(bitstream))); - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLinksetProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLinksetProcessor.java deleted file mode 100644 index 3c13b767ee0c..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamLinksetProcessor.java +++ /dev/null @@ -1,85 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Bitstream; -import org.dspace.content.Item; -import org.dspace.content.service.BitstreamService; -import org.dspace.core.Context; -import org.dspace.services.ConfigurationService; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation of {@link ItemSignPostingProcessor} for the linkset relation. 
- */ -public class BitstreamLinksetProcessor extends ASignPostingProcessor - implements BitstreamSignPostingProcessor { - - private static Logger log = Logger.getLogger(BitstreamLinksetProcessor.class); - - @Autowired - private BitstreamService bitstreamService; - - @Autowired - private ConfigurationService configurationService; - - public BitstreamLinksetProcessor() { - setRelation("linkset"); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Bitstream bitstream, List linksets, - Linkset primaryLinkset) { - try { - Item item = (Item) bitstreamService.getParentObject(context, bitstream); - if (item != null) { - String baseUrl = configurationService.getProperty("dspace.server.url"); - String linksetUrl = baseUrl + "/signposting/linksets/" + item.getID(); - String linksetJsonUrl = baseUrl + "/signposting/linksets/" + item.getID() + "/json"; - List relations = List.of( - new Relation(linksetUrl, "application/linkset"), - new Relation(linksetJsonUrl, "application/linkset+json") - ); - primaryLinkset.getLinkset().addAll(relations); - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, - Bitstream bitstream, List lsets) { - try { - Item item = (Item) bitstreamService.getParentObject(context, bitstream); - if (item != null) { - String baseUrl = configurationService.getProperty("dspace.server.url"); - String linksetUrl = baseUrl + "/signposting/linksets/" + item.getID(); - String linksetJsonUrl = baseUrl + "/signposting/linksets/" + item.getID() + "/json"; - List links = List.of( - new Lset(linksetUrl, getRelation(), "application/linkset", buildAnchor(bitstream)), - new Lset(linksetJsonUrl, getRelation(), "application/linkset+json", buildAnchor(bitstream)) - ); - lsets.addAll(links); - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamPublicationBundaryProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamPublicationBundaryProcessor.java deleted file mode 100644 index 9d86856d6f90..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamPublicationBundaryProcessor.java +++ /dev/null @@ -1,76 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Bitstream; -import org.dspace.content.Item; -import org.dspace.content.service.BitstreamService; -import org.dspace.core.Context; -import org.dspace.util.FrontendUrlService; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation of {@link ItemSignPostingProcessor} for the collection relation. 
- * - * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) - * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) - */ -public class BitstreamPublicationBundaryProcessor extends ASignPostingProcessor - implements BitstreamSignPostingProcessor { - - private static Logger log = Logger.getLogger(BitstreamPublicationBundaryProcessor.class); - - @Autowired - private BitstreamService bitstreamService; - - @Autowired - private FrontendUrlService frontendUrlService; - - public BitstreamPublicationBundaryProcessor() { - setRelation("collection"); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Bitstream bitstream, List linksets, - Linkset primaryLinkset) { - try { - Item item = (Item) bitstreamService.getParentObject(context, bitstream); - if (item != null) { - String itemUiUrl = frontendUrlService.generateUrl(item); - primaryLinkset.getCollection().add(new Relation(itemUiUrl, "text/html")); - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, - Bitstream bitstream, List lsets) { - try { - Item item = (Item) bitstreamService.getParentObject(context, bitstream); - if (item != null) { - String itemUiUrl = frontendUrlService.generateUrl(item); - lsets.add(new Lset(itemUiUrl, getRelation(), "text/html", buildAnchor(bitstream))); - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamTypeProcessor.java deleted file mode 100644 index 5c4569e4c165..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/BitstreamTypeProcessor.java +++ /dev/null @@ -1,74 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import java.text.MessageFormat; -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.commons.lang.StringUtils; -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import org.dspace.app.rest.signposting.processor.BitstreamSignPostingProcessor; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Bitstream; -import org.dspace.content.service.BitstreamService; -import org.dspace.core.Context; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation of {@link ItemSignPostingProcessor} for the type relation. 
- */ -public class BitstreamTypeProcessor extends ASignPostingProcessor - implements BitstreamSignPostingProcessor { - - private static Logger log = Logger.getLogger(BitstreamTypeProcessor.class); - - @Autowired - private BitstreamService bitstreamService; - - public BitstreamTypeProcessor() { - setRelation("type"); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Bitstream bitstream, List linksets, - Linkset primaryLinkset) { - try { - String type = bitstreamService.getMetadata(bitstream, getMetadataField()); - if (StringUtils.isNotBlank(type)) { - if (StringUtils.isNotBlank(getPattern())) { - type = MessageFormat.format(getPattern(), type); - } - primaryLinkset.getType().add(new Relation(type, null)); - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, - Bitstream bitstream, List lsets) { - try { - String type = bitstreamService.getMetadata(bitstream, getMetadataField()); - if (StringUtils.isNotBlank(type)) { - if (StringUtils.isNotBlank(getPattern())) { - type = MessageFormat.format(getPattern(), type); - } - lsets.add(new Lset(type, getRelation(), buildAnchor(bitstream))); - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemAuthorProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemAuthorProcessor.java deleted file mode 100644 index bdaecd41f201..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemAuthorProcessor.java +++ /dev/null @@ -1,109 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import static org.dspace.content.Item.ANY; -import static org.dspace.content.MetadataSchemaEnum.PERSON; - -import java.util.List; -import java.util.UUID; -import javax.servlet.http.HttpServletRequest; - -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Item; -import org.dspace.content.MetadataSchemaEnum; -import org.dspace.content.MetadataValue; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.dspace.util.FrontendUrlService; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation of {@link ItemSignPostingProcessor} for the author relation. 
- * - * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) - * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) - */ -public class ItemAuthorProcessor extends ASignPostingProcessor - implements ItemSignPostingProcessor { - - /** log4j category */ - private static Logger log = Logger.getLogger(ItemAuthorProcessor.class); - - @Autowired - private ItemService itemService; - - @Autowired - private FrontendUrlService frontendUrlService; - - private String retrievedExternally; - - public String getRetrievedExternally() { - return retrievedExternally; - } - - public void setRetrievedExternally(String retrievedExternally) { - this.retrievedExternally = retrievedExternally; - } - - public ItemAuthorProcessor() { - setRelation("author"); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Item item, List linksets, Linkset primaryLinkset) { - try { - List authors = itemService - .getMetadata(item, MetadataSchemaEnum.DC.getName(), "contributor", ANY, ANY); - for (MetadataValue author : authors) { - if (author.getAuthority() != null) { - String authorUuid = author.getAuthority(); - Item authorItem = itemService.find(context, UUID.fromString(authorUuid)); - String authorOrcid = itemService - .getMetadataFirstValue(authorItem, PERSON.getName(), "identifier", "orcid", ANY); - if (StringUtils.isNotBlank(authorOrcid)) { - String href = frontendUrlService.generateUrl(authorItem); - primaryLinkset.getAuthor().add(new Relation(href, authorOrcid)); - } - } - } - } catch (Exception e) { - log.error("Problem to add signposting pattern", e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, - Item item, List lsets) { - try { - List authors = itemService - .getMetadata(item, MetadataSchemaEnum.DC.getName(), "contributor", ANY, ANY); - for (MetadataValue author : authors) { - if (author.getAuthority() != null) { - String authorUuid = author.getAuthority(); - Item authorItem = itemService.find(context, UUID.fromString(authorUuid)); - String authorOrcid = itemService - .getMetadataFirstValue(authorItem, PERSON.getName(), "identifier", "orcid", ANY); - if (StringUtils.isNotBlank(authorOrcid)) { - String href = frontendUrlService.generateUrl(authorItem); - lsets.add(new Lset(href, getRelation(), authorOrcid, buildAnchor(context, item))); - } - } - } - } catch (Exception e) { - log.error("Problem to add signposting pattern", e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemIdentifierProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemIdentifierProcessor.java deleted file mode 100644 index 5f5f5d86b6a7..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemIdentifierProcessor.java +++ /dev/null @@ -1,89 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import java.text.MessageFormat; -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import 
org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Item; -import org.dspace.content.MetadataValue; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation of {@link ItemSignPostingProcessor} for the author relation. - * - * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) - * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) - */ -public class ItemIdentifierProcessor extends ASignPostingProcessor - implements ItemSignPostingProcessor { - - /** - * log4j category - */ - private static Logger log = Logger.getLogger(ItemIdentifierProcessor.class); - - @Autowired - private ItemService itemService; - - public ItemIdentifierProcessor() { - setRelation("cite-as"); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Item item, List linksets, Linkset primaryLinkset) { - try { - List identifiers = itemService.getMetadataByMetadataString(item, getMetadataField()); - for (MetadataValue identifier : identifiers) { - if (identifier != null) { - String identifierValue = identifier.getValue(); - if (StringUtils.isNotBlank(identifierValue)) { - if (StringUtils.isNotBlank(getPattern())) { - identifierValue = MessageFormat.format(getPattern(), identifierValue); - } - primaryLinkset.getCiteAs().add(new Relation(identifierValue, null)); - } - } - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, - Item item, List lsets) { - try { - List identifiers = itemService.getMetadataByMetadataString(item, getMetadataField()); - for (MetadataValue identifier : identifiers) { - if (identifier != null) { - String identifierValue = identifier.getValue(); - if (StringUtils.isNotBlank(identifierValue)) { - if (StringUtils.isNotBlank(getPattern())) { - identifierValue = MessageFormat.format(getPattern(), identifierValue); - } - lsets.add(new Lset(identifierValue, getRelation(), buildAnchor(context, item))); - } - } - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLandingPageProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLandingPageProcessor.java deleted file mode 100644 index 8c048a9f7c40..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLandingPageProcessor.java +++ /dev/null @@ -1,59 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import org.dspace.app.rest.signposting.model.TypedLinkRest; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Item; -import org.dspace.core.Context; -import org.dspace.util.FrontendUrlService; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation 
of {@link ItemSignPostingProcessor} for the Landing Page relation. - */ -public class ItemLandingPageProcessor extends ASignPostingProcessor implements ItemSignPostingProcessor { - - private static Logger log = Logger.getLogger(ItemLandingPageProcessor.class); - - @Autowired - private FrontendUrlService frontendUrlService; - - public ItemLandingPageProcessor() { - setRelation(TypedLinkRest.Relation.LANDING_PAGE.getName()); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Item item, List linksets, Linkset primaryLinkset) { - try { - String landingPageUrl = frontendUrlService.generateUrl(item); - primaryLinkset.getLandingPage().add(new Relation(landingPageUrl, "text/html")); - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, Item item, List lsets) { - try { - String landingPageUrl = frontendUrlService.generateUrl(item); - lsets.add(new Lset(landingPageUrl, getRelation(), "text/html", buildAnchor(context, item))); - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLicenseProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLicenseProcessor.java deleted file mode 100644 index 90177f8c2a93..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemLicenseProcessor.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import java.text.MessageFormat; -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Item; -import org.dspace.content.MetadataFieldName; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation of {@link ItemSignPostingProcessor} for the license relation. 
- */ -public class ItemLicenseProcessor extends ASignPostingProcessor - implements ItemSignPostingProcessor { - - private static Logger log = Logger.getLogger(ItemLicenseProcessor.class); - - @Autowired - private ItemService itemService; - - public ItemLicenseProcessor() { - setRelation("license"); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Item item, List linksets, Linkset primaryLinkset) { - try { - if (StringUtils.isNotEmpty(getMetadataField())) { - String license = itemService - .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); - if (StringUtils.isNotBlank(license)) { - if (StringUtils.isNotBlank(getPattern())) { - license = MessageFormat.format(getPattern(), license); - } - primaryLinkset.getLicense().add(new Relation(license, null)); - } - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, - Item item, List lsets) { - try { - if (StringUtils.isNotEmpty(getMetadataField())) { - String license = itemService - .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); - if (StringUtils.isNotBlank(license)) { - if (StringUtils.isNotBlank(getPattern())) { - license = MessageFormat.format(getPattern(), license); - } - lsets.add(new Lset(license, getRelation(), buildAnchor(context, item))); - } - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemPublicationBundaryProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemPublicationBundaryProcessor.java deleted file mode 100644 index 265b991521ae..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemPublicationBundaryProcessor.java +++ /dev/null @@ -1,87 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import java.sql.SQLException; -import java.text.MessageFormat; -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Bitstream; -import org.dspace.content.Bundle; -import org.dspace.content.Item; -import org.dspace.core.Constants; -import org.dspace.core.Context; -import org.dspace.services.ConfigurationService; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation of {@link ItemSignPostingProcessor} for the item relation. 
- * - * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.com) - * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) - */ -public class ItemPublicationBundaryProcessor extends ASignPostingProcessor - implements ItemSignPostingProcessor { - - /** - * log4j category - */ - private static Logger log = Logger - .getLogger(ItemPublicationBundaryProcessor.class); - - @Autowired - private ConfigurationService configurationService; - - public ItemPublicationBundaryProcessor() { - setRelation("item"); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Item item, List linksets, Linkset primaryLinkset) { - String url = configurationService.getProperty("dspace.ui.url"); - try { - for (Bundle bundle : item.getBundles(Constants.CONTENT_BUNDLE_NAME)) { - for (Bitstream bitstream : bundle.getBitstreams()) { - String mimeType = bitstream.getFormat(context).getMIMEType(); - primaryLinkset.getItem().add( - new Relation( - MessageFormat.format(getPattern(), - url, "bitstreams", bitstream.getID() + "/download"), mimeType)); - } - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, - Item item, List lsets) { - String url = configurationService.getProperty("dspace.ui.url"); - try { - for (Bundle bundle : item.getBundles(Constants.CONTENT_BUNDLE_NAME)) { - for (Bitstream bitstream : bundle.getBitstreams()) { - String mimeType = bitstream.getFormat(context).getMIMEType(); - lsets.add(new Lset(MessageFormat.format(getPattern(), - url, "bitstreams", bitstream.getID() + "/download"), - getRelation(), mimeType, buildAnchor(context, item))); - } - } - } catch (SQLException e) { - log.error(e.getMessage(), e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemTypeProcessor.java deleted file mode 100644 index 1d5ecb0a8992..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/relation/ItemTypeProcessor.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.relation; - -import java.text.MessageFormat; -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; -import org.dspace.app.rest.signposting.model.Linkset; -import org.dspace.app.rest.signposting.model.Lset; -import org.dspace.app.rest.signposting.model.Relation; -import org.dspace.app.rest.signposting.processor.ItemSignPostingProcessor; -import org.dspace.content.Item; -import org.dspace.content.MetadataFieldName; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * An implementation of {@link ItemSignPostingProcessor} for the type relation. 
- */ -public class ItemTypeProcessor extends ASignPostingProcessor - implements ItemSignPostingProcessor { - - private static Logger log = Logger.getLogger(ItemTypeProcessor.class); - - @Autowired - private ItemService itemService; - - public ItemTypeProcessor() { - setRelation("type"); - } - - @Override - public void buildRelation(Context context, HttpServletRequest request, - Item item, List linksets, Linkset primaryLinkset) { - try { - if (StringUtils.isNotBlank(getMetadataField())) { - String itemType = itemService - .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); - if (StringUtils.isNotBlank(itemType)) { - if (StringUtils.isNotBlank(getPattern())) { - itemType = MessageFormat.format(getPattern(), itemType); - } - primaryLinkset.getType().add(new Relation(itemType, null)); - } - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - - @Override - public void buildLset(Context context, HttpServletRequest request, - Item item, List lsets) { - try { - if (StringUtils.isNotEmpty(getMetadataField())) { - String itemType = itemService - .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); - if (StringUtils.isNotBlank(itemType)) { - if (StringUtils.isNotBlank(getPattern())) { - itemType = MessageFormat.format(getPattern(), itemType); - } - lsets.add(new Lset(itemType, getRelation(), buildAnchor(context, item))); - } - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java new file mode 100644 index 000000000000..31860143b997 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java @@ -0,0 +1,54 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.utils; + +import java.util.List; +import java.util.stream.Collectors; + +import org.dspace.app.rest.signposting.model.Linkset; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelation; +import org.dspace.app.rest.signposting.model.LinksetRelationType; + +/** + * Class for mapping {@link Linkset} objects. + */ +public class LinksetMapper { + + private LinksetMapper() { + } + + /** + * Converts list of linkset nodes into linkset. 
+ * + * @param linksetNodes + * @return linkset + */ + public static Linkset map(List linksetNodes) { + Linkset linkset = new Linkset(); + linkset.setLinkset(getLinksetRelationsByType(linksetNodes, LinksetRelationType.LINKSET)); + linkset.setAuthor(getLinksetRelationsByType(linksetNodes, LinksetRelationType.AUTHOR)); + linkset.setItem(getLinksetRelationsByType(linksetNodes, LinksetRelationType.ITEM)); + linkset.setType(getLinksetRelationsByType(linksetNodes, LinksetRelationType.TYPE)); + linkset.setCollection(getLinksetRelationsByType(linksetNodes, LinksetRelationType.COLLECTION)); + linkset.setLicense(getLinksetRelationsByType(linksetNodes, LinksetRelationType.LICENSE)); + linkset.setCiteAs(getLinksetRelationsByType(linksetNodes, LinksetRelationType.CITE_AS)); + if (!linksetNodes.isEmpty()) { + linkset.setAnchor(linksetNodes.get(0).getAnchor()); + } + return linkset; + } + + private static List getLinksetRelationsByType(List linkset, + LinksetRelationType type) { + return linkset.stream() + .filter(linksetNode -> type.equals(linksetNode.getRelation())) + .map(linksetNode -> new LinksetRelation(linksetNode.getLink(), linksetNode.getType())) + .collect(Collectors.toList()); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index 13f578650516..39a7ef664ba6 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -15,6 +15,7 @@ import java.io.InputStream; import java.text.MessageFormat; +import java.util.List; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; @@ -22,15 +23,30 @@ import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.GroupBuilder; import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.builder.WorkspaceItemBuilder; import org.dspace.content.Bitstream; import org.dspace.content.Collection; +import org.dspace.content.EntityType; import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.MetadataValue; +import org.dspace.content.RelationshipType; +import org.dspace.content.WorkspaceItem; import org.dspace.content.authority.Choices; import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.content.authority.service.MetadataAuthorityService; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipTypeService; +import org.dspace.eperson.Group; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.util.SimpleMapConverter; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; @@ -39,7 +55,9 @@ public class LinksetRestControllerIT extends AbstractControllerIntegrationTest { private static final String doiPattern = "https://doi.org/{0}"; + private static final String orcidPattern = "http://orcid.org/{0}"; private static final String doi = "10.1007/978-3-642-35233-1_18"; + private static 
final String AUTHOR = "Author"; private Collection collection; @@ -47,10 +65,22 @@ public class LinksetRestControllerIT extends AbstractControllerIntegrationTest { private ConfigurationService configurationService; @Autowired - MetadataAuthorityService metadataAuthorityService; + private MetadataAuthorityService metadataAuthorityService; @Autowired - ChoiceAuthorityService choiceAuthorityService; + private ChoiceAuthorityService choiceAuthorityService; + + @Autowired + private ItemService itemService; + + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private RelationshipTypeService relationshipTypeService; + + @Autowired + private SimpleMapConverter mapConverterDSpaceToSchemaOrgUri; @Before @Override @@ -103,6 +133,60 @@ public void findOneItemJsonLinksets() throws Exception { Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); } + @Test + public void findOneItemJsonLinksetsWithType() throws Exception { + String articleUri = mapConverterDSpaceToSchemaOrgUri.getValue("Article"); + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .withType("Article") + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + .andExpect(jsonPath("$.linkset[0].type", + Matchers.hasSize(2))) + .andExpect(jsonPath("$.linkset[0].type[0].href", + Matchers.hasToString("https://schema.org/AboutPage"))) + .andExpect(jsonPath("$.linkset[0].type[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[0].type[1].href", + Matchers.hasToString(articleUri))) + .andExpect(jsonPath("$.linkset[0].type[1].type", + Matchers.hasToString("text/html"))); + } + + @Test + public void findOneItemJsonLinksetsWithLicence() throws Exception { + String licenceUrl = "https://exmple.com/licence"; + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata(MetadataSchemaEnum.DC.getName(), "rights", "uri", licenceUrl) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].type[0].href", + Matchers.hasToString("https://schema.org/AboutPage"))) + .andExpect(jsonPath("$.linkset[0].type[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[0].license[0].href", + Matchers.hasToString(licenceUrl))) + .andExpect(jsonPath("$.linkset[0].license[0].type", + Matchers.hasToString("text/html"))); + + } + @Test public void findOneItemJsonLinksetsWithBitstreams() throws Exception { String bitstream1Content = "ThisIsSomeDummyText"; @@ -150,11 +234,82 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { .andExpect(jsonPath("$.linkset[0].item[1].type", Matchers.hasToString(bitstream2MimeType))) .andExpect(jsonPath("$.linkset[0].anchor", - Matchers.hasToString(url + "/handle/" + item.getHandle()))) - .andExpect(jsonPath("$.linkset[0].landingPage[0].href", - Matchers.hasToString(url + "/entities/publication/" + item.getID()))) - .andExpect(jsonPath("$.linkset[0].landingPage[0].type", - 
Matchers.hasToString("text/html"))); + Matchers.hasToString(url + "/entities/publication/" + item.getID()))); + } + + @Test + public void findOneItemThatIsInWorkspaceJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .build(); + itemService.addMetadata(context, workspaceItem.getItem(), "dc", "identifier", "doi", Item.ANY, doi); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + workspaceItem.getItem().getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + } + + @Test + public void findOneWithdrawnItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withdrawn() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + } + + @Test + public void findOneEmbargoItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withIssueDate("2017-11-18") + .withEmbargoPeriod("2 week") + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + } + + @Test + public void findOneRestrictedItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withReaderGroup(internalGroup) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); } @Test @@ -183,6 +338,20 @@ public void findOneBitstreamJsonLinksets() throws Exception { .andExpect(status().isNotFound()); } + @Test + public void findOneCollectionJsonLinksets() throws Exception { + getClient().perform(get("/signposting/linksets/" + collection.getID() + "/json") + .header("Accept", "application/linkset+json")) + .andExpect(status().isNotFound()); + } + + @Test + public void findOneCommunityJsonLinksets() throws Exception { + getClient().perform(get("/signposting/linksets/" + parentCommunity.getID() + "/json") + .header("Accept", "application/linkset+json")) + .andExpect(status().isNotFound()); + } + @Test public void findOneItemLsetLinksets() throws Exception { String bitstream1Content = 
"ThisIsSomeDummyText"; @@ -204,34 +373,38 @@ public void findOneItemLsetLinksets() throws Exception { context.restoreAuthSystemState(); String url = configurationService.getProperty("dspace.ui.url"); - String expectedResponse = "<" + MessageFormat.format(doiPattern, doi) + "> ; rel=\"cite-as\" ; anchor=\"" + - url + "/handle/" + item.getHandle() + "\" , <" + url + "/entities/publication/" + item.getID() + - "> ; rel=\"landing page\" ; type=\"text/html\" ; anchor=\"" + url + "/handle/" + item.getHandle() + - "\" , <" + url + "/bitstreams/" + bitstream1.getID() + "/download> ; rel=\"item\" ; " + - "type=\"text/plain\" ; anchor=\"" + url + "/handle/" + item.getHandle() + "\" ,"; + String siteAsRelation = "<" + MessageFormat.format(doiPattern, doi) + "> ; rel=\"cite-as\" ; anchor=\"" + + url + "/entities/publication/" + item.getID() + "\" ,"; + String itemRelation = "<" + url + "/bitstreams/" + bitstream1.getID() + + "/download> ; rel=\"item\" ; " + "type=\"text/plain\" ; anchor=\"" + url + "/entities/publication/" + + item.getID() + "\" ,"; + String typeRelation = " ; rel=\"type\" ; type=\"text/html\" ; anchor=\"" + + url + "/entities/publication/" + + item.getID() + "\" ,"; getClient().perform(get("/signposting/linksets/" + item.getID()) .header("Accept", "application/linkset")) - .andExpect(content().string(expectedResponse)); + .andExpect(content().string(Matchers.containsString(siteAsRelation))) + .andExpect(content().string(Matchers.containsString(itemRelation))) + .andExpect(content().string(Matchers.containsString(typeRelation))); } @Test public void findTypedLinkForItem() throws Exception { - configurationService.setProperty("choices.plugin.dc.contributor.author", "SolrAuthorAuthority"); - configurationService.setProperty("authority.controlled.dc.contributor.author", "true"); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - String bitstreamContent = "ThisIsSomeDummyText"; String bitstreamMimeType = "text/plain"; String orcidValue = "orcidValue"; context.turnOffAuthorisationSystem(); + Item author = ItemBuilder.createItem(context, collection) - .withType("John") + .withMetadata("dspace", "entity", "type", AUTHOR) .withMetadata(PERSON.getName(), "identifier", "orcid", orcidValue) .build(); - Item item = ItemBuilder.createItem(context, collection) + List metadata = itemService.getMetadata(author, "dspace", "entity", + "type", Item.ANY, false); + itemService.removeMetadataValues(context, author, List.of(metadata.get(0))); + Item publication = ItemBuilder.createItem(context, collection) .withTitle("Item Test") .withMetadata("dc", "identifier", "doi", doi) .withAuthor("John", author.getID().toString(), Choices.CF_ACCEPTED) @@ -239,42 +412,82 @@ public void findTypedLinkForItem() throws Exception { Bitstream bitstream = null; try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - bitstream = BitstreamBuilder.createBitstream(context, item, is) + bitstream = BitstreamBuilder.createBitstream(context, publication, is) .withName("Bitstream") .withDescription("description") .withMimeType(bitstreamMimeType) .build(); } + + EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType authorEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, AUTHOR).build(); + RelationshipType isAuthorOfPublicationRelationshipType = + RelationshipTypeBuilder.createRelationshipTypeBuilder(context, publicationEntityType, authorEntityType, + "isAuthorOfPublication", 
"isPublicationOfAuthor", + null, null, null, null).build(); + isAuthorOfPublicationRelationshipType.setTilted(RelationshipType.Tilted.LEFT); + isAuthorOfPublicationRelationshipType = + relationshipTypeService.create(context, isAuthorOfPublicationRelationshipType); + RelationshipBuilder.createRelationshipBuilder(context, publication, author, + isAuthorOfPublicationRelationshipType).build(); + context.restoreAuthSystemState(); String url = configurationService.getProperty("dspace.ui.url"); - getClient().perform(get("/signposting/links/" + item.getID()) + getClient().perform(get("/signposting/links/" + publication.getID()) .header("Accept", "application/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.hasSize(4))) - .andExpect(jsonPath("$[0].href", - Matchers.hasToString(url + "/entities/publication/" + item.getID()))) - .andExpect(jsonPath("$[0].rel", - Matchers.hasToString("landing page"))) - .andExpect(jsonPath("$[0].type", - Matchers.hasToString("text/html"))) - .andExpect(jsonPath("$[1].href", - Matchers.hasToString(url + "/bitstreams/" + bitstream.getID() + "/download"))) - .andExpect(jsonPath("$[1].rel", - Matchers.hasToString("item"))) - .andExpect(jsonPath("$[1].type", - Matchers.hasToString("text/plain"))) - .andExpect(jsonPath("$[2].href", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) - .andExpect(jsonPath("$[2].rel", - Matchers.hasToString("cite-as"))) - .andExpect(jsonPath("$[3].href", - Matchers.hasToString(url + "/entities/publication/" + author.getID()))) - .andExpect(jsonPath("$[3].rel", - Matchers.hasToString("author"))) - .andExpect(jsonPath("$[3].type", - Matchers.hasToString(orcidValue))); + .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(orcidPattern, orcidValue) + "' " + + "&& @.rel == 'author' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(doiPattern, doi) + "' " + + "&& @.rel == 'cite-as')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/bitstreams/" + bitstream.getID() + "/download' " + + "&& @.rel == 'item' " + + "&& @.type == 'text/plain')]").exists()) + .andExpect(jsonPath("$[?(@.href == 'https://schema.org/AboutPage' " + + "&& @.rel == 'type' " + + "&& @.type == 'text/html')]").exists()); + } + + @Test + public void findTypedLinkForBitstream() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + String uiUrl = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/links/" + bitstream.getID()) + .header("Accept", "application/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(3))) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + + "&& @.rel == 'collection' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 
'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + + "' && @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); @@ -282,11 +495,61 @@ public void findTypedLinkForItem() throws Exception { } @Test - public void findTypedLinkForBitstream() throws Exception { + public void findTypedLinkForBitstreamWithType() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + bitstreamService.addMetadata(context, bitstream, "dc", "type", null, Item.ANY, "Article"); + + context.restoreAuthSystemState(); + + String uiUrl = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/links/" + bitstream.getID()) + .header("Accept", "application/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(4))) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + + "&& @.rel == 'collection' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + + "' && @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()) + .andExpect(jsonPath("$[?(@.href == 'https://schema.org/ScholarlyArticle' " + + "&& @.rel == 'type' " + + "&& @.type == 'text/html')]").exists()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForRestrictedBitstream() throws Exception { String bitstreamContent = "ThisIsSomeDummyText"; String bitstreamMimeType = "text/plain"; context.turnOffAuthorisationSystem(); + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); Item item = ItemBuilder.createItem(context, collection) .withTitle("Item Test") .withMetadata("dc", "identifier", "doi", doi) @@ -298,35 +561,113 @@ public void findTypedLinkForBitstream() throws Exception { .withName("Bitstream") .withDescription("description") .withMimeType(bitstreamMimeType) + .withReaderGroup(internalGroup) .build(); } context.restoreAuthSystemState(); String uiUrl = configurationService.getProperty("dspace.ui.url"); - String serverUrl = configurationService.getProperty("dspace.server.url"); getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.hasSize(3))) - .andExpect(jsonPath("$[0].href", - Matchers.hasToString(uiUrl + "/entities/publication/" + item.getID()))) - .andExpect(jsonPath("$[0].rel", - 
Matchers.hasToString("collection"))) - .andExpect(jsonPath("$[0].type", - Matchers.hasToString("text/html"))) - .andExpect(jsonPath("$[1].href", - Matchers.hasToString(serverUrl + "/signposting/linksets/" + item.getID()))) - .andExpect(jsonPath("$[1].rel", - Matchers.hasToString("linkset"))) - .andExpect(jsonPath("$[1].type", - Matchers.hasToString("application/linkset"))) - .andExpect(jsonPath("$[2].href", - Matchers.hasToString(serverUrl + "/signposting/linksets/" + item.getID() + "/json"))) - .andExpect(jsonPath("$[2].rel", - Matchers.hasToString("linkset"))) - .andExpect(jsonPath("$[2].type", - Matchers.hasToString("application/linkset+json"))); + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + + "&& @.rel == 'collection' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + + "' && @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForBitstreamUnderEmbargo() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withIssueDate("2017-10-17") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .withEmbargoPeriod("6 months") + .build(); + } + context.restoreAuthSystemState(); + + String uiUrl = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/links/" + bitstream.getID()) + .header("Accept", "application/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(3))) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + + "&& @.rel == 'collection' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + + "' && @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForBitstreamOfWorkspaceItem() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .build(); + Item item = workspaceItem.getItem(); + itemService.addMetadata(context, item, "dc", 
"identifier", "doi", Item.ANY, doi); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, workspaceItem.getItem(), is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + String uiUrl = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/links/" + bitstream.getID()) + .header("Accept", "application/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(3))) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/items/" + item.getID() + "' " + + "&& @.rel == 'collection' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + + "' && @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); diff --git a/dspace/config/crosswalks/signposting/mapConverter-dspace-to-schema-org-uri.properties b/dspace/config/crosswalks/signposting/mapConverter-dspace-to-schema-org-uri.properties new file mode 100644 index 000000000000..e2fef507b77a --- /dev/null +++ b/dspace/config/crosswalks/signposting/mapConverter-dspace-to-schema-org-uri.properties @@ -0,0 +1,23 @@ +# Mapping between DSpace common publication's types and a schema.org URI +Animation = https://schema.org/3DModel +Article = https://schema.org/ScholarlyArticle +Book = https://schema.org/Book +Book\ chapter = https://schema.org/Chapter +Dataset = https://schema.org/Dataset +Learning\ Object = https://schema.org/LearningResource +Image = https://schema.org/ImageObject +Image,\ 3-D = https://schema.org/3DModel +Map = https://schema.org/Map +Musical\ Score = https://schema.org/MusicComposition +Plan\ or\ blueprint = https://schema.org/Map +Preprint = https://schema.org/VisualArtwork +Presentation = https://schema.org/PresentationDigitalDocument +Recording,\ acoustical = https://schema.org/MusicRecording +Recording,\ musical = https://schema.org/MusicRecording +Recording,\ oral = https://schema.org/MusicRecording +Software = https://schema.org/SoftwareApplication +Technical\ Report = https://schema.org/Report +Thesis = https://schema.org/Thesis +Video = https://schema.org/VideoObject +Working\ Paper = https://schema.org/TechArticle +Other = https://schema.org/CreativeWork \ No newline at end of file diff --git a/dspace/config/spring/rest/signposting.xml b/dspace/config/spring/rest/signposting.xml index 33e73f3b77e1..ed0c8912eb00 100644 --- a/dspace/config/spring/rest/signposting.xml +++ b/dspace/config/spring/rest/signposting.xml @@ -1,46 +1,40 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd"> - + - - - - - - + + + + + - - - - - + + + + - - - + + + + - + - + - - - + - - - + - + - - - - - - - + + From bfbf17f5772659414757565f1a8378dedd682167 Mon Sep 17 00:00:00 2001 From: Mohamed Saber Eskander Date: Wed, 31 May 2023 12:33:56 +0300 Subject: [PATCH 250/686] - refactoring and adding new test cases - update resource polices of derivative bitstreams also - check authorization when run script 
- added some log information --- .../bulkaccesscontrol/BulkAccessControl.java | 158 +++- .../BulkAccessControlCli.java | 48 ++ ...lkAccessControlCliScriptConfiguration.java | 23 + .../BulkAccessControlScriptConfiguration.java | 2 +- .../model/AccessConditionBitstream.java | 5 + .../model/AccessConditionItem.java | 5 + .../model/AccessControl.java | 40 - .../model/BulkAccessControlInput.java | 72 ++ .../mediafilter/MediaFilterServiceImpl.java | 82 +- .../service/MediaFilterService.java | 18 + .../dspace/content/service/ItemService.java | 7 +- .../BulkAccessControlIT.java | 704 +++++++++++++++++- .../org/dspace/builder/CollectionBuilder.java | 25 + 13 files changed, 1077 insertions(+), 112 deletions(-) delete mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessControl.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index 5c32484ec774..86061559798d 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -7,6 +7,7 @@ */ package org.dspace.app.bulkaccesscontrol; +import static org.apache.commons.collections4.CollectionUtils.isEmpty; import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED; @@ -15,12 +16,15 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.text.DateFormat; +import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.TimeZone; import java.util.UUID; import java.util.function.Function; @@ -28,15 +32,16 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.cli.ParseException; -import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.app.bulkaccesscontrol.exception.BulkAccessControlException; import org.dspace.app.bulkaccesscontrol.model.AccessCondition; import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream; import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem; -import org.dspace.app.bulkaccesscontrol.model.AccessControl; import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput; import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService; +import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory; +import org.dspace.app.mediafilter.service.MediaFilterService; import org.dspace.app.util.DSpaceObjectUtilsImpl; import org.dspace.app.util.service.DSpaceObjectUtils; import org.dspace.authorize.AuthorizeException; @@ -56,7 +61,10 @@ import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import 
org.dspace.submit.model.AccessConditionOption; import org.dspace.utils.DSpace; @@ -84,6 +92,12 @@ public class BulkAccessControl extends DSpaceRunnable itemAccessConditions; private Map uploadAccessConditions; @@ -92,6 +106,10 @@ public class BulkAccessControl extends DSpaceRunnable"); throw new BulkAccessControlException("wrong value for item mode<" + mode + ">"); - } else if (ADD_MODE.equals(mode) && CollectionUtils.isEmpty(accessConditions)) { + } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) { handler.logError("accessConditions of item must be provided with mode<" + ADD_MODE + ">"); throw new BulkAccessControlException( "accessConditions of item must be provided with mode<" + ADD_MODE + ">"); @@ -208,7 +241,7 @@ private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQ } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) { handler.logError("wrong value for bitstream mode<" + mode + ">"); throw new BulkAccessControlException("wrong value for bitstream mode<" + mode + ">"); - } else if (ADD_MODE.equals(mode) && CollectionUtils.isEmpty(accessConditions)) { + } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) { handler.logError("accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); throw new BulkAccessControlException( "accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); @@ -252,7 +285,7 @@ private void validateAccessCondition(AccessCondition accessCondition) { } } - public void updateItemsAndBitstreamsPolices(AccessControl accessControl) + public void updateItemsAndBitstreamsPolices(BulkAccessControlInput accessControl) throws SQLException, SearchServiceException, AuthorizeException { int counter = 0; @@ -328,18 +361,23 @@ private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) { return discoverQuery; } - private void updateItemPolicies(Item item, AccessControl accessControl) throws SQLException, AuthorizeException { + private void updateItemPolicies(Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + AccessConditionItem acItem = accessControl.getItem(); - if (REPLACE_MODE.equals(accessControl.getItem().getMode())) { + if (REPLACE_MODE.equals(acItem.getMode())) { removeReadPolicies(item, TYPE_CUSTOM); removeReadPolicies(item, TYPE_INHERITED); } setItemPolicies(item, accessControl); + logInfo(acItem.getAccessConditions(), acItem.getMode(), item); } - private void setItemPolicies(Item item, AccessControl accessControl) throws SQLException, AuthorizeException { - AccessConditionItem itemControl = accessControl.getItem(); + private void setItemPolicies(Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + accessControl .getItem() .getAccessConditions() @@ -349,7 +387,7 @@ private void setItemPolicies(Item item, AccessControl accessControl) throws SQLE itemService.adjustItemPolicies(context, item, item.getOwningCollection()); } - private void updateBitstreamsPolicies(Item item, AccessControl accessControl) { + private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessControl) { AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints(); item.getBundles(CONTENT_BUNDLE_NAME).stream() @@ -367,15 +405,18 @@ private boolean containsConstraints(AccessConditionBitstream bitstream) { isNotEmpty(bitstream.getConstraints().getUuid()); } - private void updateBitstreamPolicies(Bitstream bitstream, Item item, AccessControl 
accessControl) { + private void updateBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) { + + AccessConditionBitstream acBitstream = accessControl.getBitstream(); - if (REPLACE_MODE.equals(accessControl.getBitstream().getMode())) { + if (REPLACE_MODE.equals(acBitstream.getMode())) { removeReadPolicies(bitstream, TYPE_CUSTOM); removeReadPolicies(bitstream, TYPE_INHERITED); } try { setBitstreamPolicies(bitstream, item, accessControl); + logInfo(acBitstream.getAccessConditions(), acBitstream.getMode(), bitstream); } catch (SQLException | AuthorizeException e) { throw new RuntimeException(e); } @@ -390,14 +431,16 @@ private void removeReadPolicies(DSpaceObject dso, String type) { } } - private void setBitstreamPolicies(Bitstream bitstream, Item item, AccessControl accessControl) + private void setBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) throws SQLException, AuthorizeException { - AccessConditionBitstream bitstreamControl = accessControl.getBitstream(); + accessControl.getBitstream() .getAccessConditions() .forEach(accessCondition -> createResourcePolicy(bitstream, accessCondition, uploadAccessConditions.get(accessCondition.getName()))); + itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream); + mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, bitstream); } private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition, @@ -415,25 +458,84 @@ private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondit } } - private void assignCurrentUserInContext() throws SQLException { - UUID uuid = getEpersonIdentifier(); - if (uuid != null) { - EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid); - context.setCurrentUser(ePerson); + /** + * Set the eperson in the context + * + * @param context the context + * @throws SQLException if database error + */ + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier()); + + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier()); + throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier()); } + + context.setCurrentUser(myEPerson); } - private void assignSpecialGroupsInContext() { - for (UUID uuid : handler.getSpecialGroups()) { - context.setSpecialGroup(uuid); + private void logInfo(List accessConditions, String mode, DSpaceObject dso) { + String type = dso.getClass().getSimpleName(); + + if (REPLACE_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logInfo("Cleaning " + type + " {" + dso.getID() + "} policies"); + handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}"); + return; } + + StringBuilder message = new StringBuilder(); + message.append(mode.equals(ADD_MODE) ? "Adding " : "Replacing ") + .append(type) + .append(" {") + .append(dso.getID()) + .append("} policy") + .append(mode.equals(ADD_MODE) ? 
" with " : " to ") + .append("access conditions:"); + + AppendAccessConditionsInfo(message, accessConditions); + + handler.logInfo(message.toString()); + + if (REPLACE_MODE.equals(mode) && isAppendModeEnabled()) { + handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}"); + } + } + + private void AppendAccessConditionsInfo(StringBuilder message, List accessConditions) { + DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + message.append("{"); + + for (int i = 0; i < accessConditions.size(); i++) { + message.append(accessConditions.get(i).getName()); + + Optional.ofNullable(accessConditions.get(i).getStartDate()) + .ifPresent(date -> message.append(", start_date=" + dateFormat.format(date))); + + Optional.ofNullable(accessConditions.get(i).getEndDate()) + .ifPresent(date -> message.append(", end_date=" + dateFormat.format(date))); + + if (i != accessConditions.size() - 1) { + message.append(", "); + } + } + + message.append("}"); + } + + private boolean isAppendModeEnabled() { + return configurationService.getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode"); + } + + protected boolean isAuthorized(Context context) { + return true; } @Override @SuppressWarnings("unchecked") public BulkAccessControlScriptConfiguration getScriptConfiguration() { return new DSpace().getServiceManager() - .getServiceByName("bulk-access-control",BulkAccessControlScriptConfiguration.class); + .getServiceByName("bulk-access-control", BulkAccessControlScriptConfiguration.class); } } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java index 054c642298d4..4e8cfe480eeb 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java @@ -7,6 +7,16 @@ */ package org.dspace.app.bulkaccesscontrol; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.scripts.DSpaceCommandLineParameter; + /** * Extension of {@link BulkAccessControl} for CLI. 
* @@ -15,4 +25,42 @@ */ public class BulkAccessControlCli extends BulkAccessControl { + @Override + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson; + eperson = commandLine.getOptionValue('e'); + + if (eperson == null) { + handler.logError("An eperson to do the the Bulk Access Control must be specified " + + "(run with -h flag for details)"); + throw new UnsupportedOperationException("An eperson to do the Bulk Access Control must be specified"); + } + + if (StringUtils.contains(eperson, '@')) { + myEPerson = epersonService.findByEmail(context, eperson); + } else { + myEPerson = epersonService.find(context, UUID.fromString(eperson)); + } + + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)"); + throw new UnsupportedOperationException("EPerson cannot be found: " + eperson); + } + + context.setCurrentUser(myEPerson); + } + + @Override + protected boolean isAuthorized(Context context) { + + if (context.getCurrentUser() == null) { + return false; + } + + return getScriptConfiguration().isAllowedToExecute(context, + Arrays.stream(commandLine.getOptions()) + .map(option -> + new DSpaceCommandLineParameter("-" + option.getOpt(), option.getValue())) + .collect(Collectors.toList())); + } } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java index 25b631c51f46..e329d8894a37 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java @@ -7,6 +7,10 @@ */ package org.dspace.app.bulkaccesscontrol; +import java.io.InputStream; + +import org.apache.commons.cli.Options; + /** * Extension of {@link BulkAccessControlScriptConfiguration} for CLI. 
* @@ -16,4 +20,23 @@ public class BulkAccessControlCliScriptConfiguration extends BulkAccessControlScriptConfiguration { + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption("u", "uuid", true, "target uuids of communities/collections/items"); + options.getOption("u").setType(String.class); + options.getOption("u").setRequired(true); + + options.addOption("f", "file", true, "source json file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(true); + + options.addOption("e", "eperson", true, "email of eperson doing importing"); + options.getOption("e").setRequired(true); + + options.addOption("h", "help", false, "help"); + + return options; + } } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java index 1fa6e12abd33..590a7eaa7957 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java @@ -76,7 +76,7 @@ public Options getOptions() { if (options == null) { Options options = new Options(); - options.addOption("u", "target", true, "target uuids of communities/collections/items"); + options.addOption("u", "uuids", true, "target uuids of communities/collections/items"); options.getOption("u").setType(String.class); options.getOption("u").setRequired(true); diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java index 46354ddff41c..2176e24d7f9d 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java @@ -10,7 +10,12 @@ import java.util.ArrayList; import java.util.List; +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + /** + * Class that model the value of bitstream node + * from json file of the {@link BulkAccessControl} + * * @author Mohamed Eskander (mohamed.eskander at 4science.it) */ public class AccessConditionBitstream { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java index 10f17c1c01bf..c482dfc34d65 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java @@ -10,7 +10,12 @@ import java.util.ArrayList; import java.util.List; +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + /** + * Class that model the value of item node + * from json file of the {@link BulkAccessControl} + * * @author Mohamed Eskander (mohamed.eskander at 4science.it) */ public class AccessConditionItem { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessControl.java deleted file mode 100644 index 8e3c11001fa8..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessControl.java +++ /dev/null @@ -1,40 +0,0 @@ -/** - * The contents of this file are subject to the license and 
copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.bulkaccesscontrol.model; - -public class AccessControl { - - AccessConditionItem item; - - AccessConditionBitstream bitstream; - - public AccessControl() { - } - - public AccessControl(AccessConditionItem item, - AccessConditionBitstream bitstream) { - this.item = item; - this.bitstream = bitstream; - } - - public AccessConditionItem getItem() { - return item; - } - - public void setItem(AccessConditionItem item) { - this.item = item; - } - - public AccessConditionBitstream getBitstream() { - return bitstream; - } - - public void setBitstream(AccessConditionBitstream bitstream) { - this.bitstream = bitstream; - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java new file mode 100644 index 000000000000..0f8852a71f7d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java @@ -0,0 +1,72 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + +/** + * Class that model the content of the JSON file used as input for the {@link BulkAccessControl} + * + *
+ * {
+ *   item: {
+ *     mode: "replace",
+ *     accessConditions: [
+ *       {
+ *         "name": "openaccess"
+ *       }
+ *     ]
+ *   },
+ *   bitstream: {
+ *     constraints: {
+ *       uuid: [bit-uuid1, bit-uuid2, ..., bit-uuidN],
+ *     },
+ *     mode: "add",
+ *     accessConditions: [
+ *       {
+ *         "name": "embargo",
+ *         "startDate": "2024-06-24T23:59:59.999+0000"
+ *       }
+ *     ]
+ *   }
+ * }
+ *
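+ * A minimal usage sketch, assuming Jackson's ObjectMapper (which BulkAccessControl already
+ * imports), a hypothetical access-conditions.json file in the format above, and the usual
+ * getter/setter bean convention on the model classes:
+ *
+ *   BulkAccessControlInput input = new ObjectMapper()
+ *       .readValue(new File("access-conditions.json"), BulkAccessControlInput.class);
+ *   String itemMode = input.getItem().getMode();           // "replace"
+ *   String bitstreamMode = input.getBitstream().getMode(); // "add"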
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessControlInput { + + AccessConditionItem item; + + AccessConditionBitstream bitstream; + + public BulkAccessControlInput() { + } + + public BulkAccessControlInput(AccessConditionItem item, + AccessConditionBitstream bitstream) { + this.item = item; + this.bitstream = bitstream; + } + + public AccessConditionItem getItem() { + return item; + } + + public void setItem(AccessConditionItem item) { + this.item = item; + } + + public AccessConditionBitstream getBitstream() { + return bitstream; + } + + public void setBitstream(AccessConditionBitstream bitstream) { + this.bitstream = bitstream; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java index 88061d1d4d37..e2c6c9c5db06 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java @@ -8,13 +8,17 @@ package org.dspace.app.mediafilter; import java.io.InputStream; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.BitstreamFormat; @@ -388,18 +392,7 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo bitstreamService.update(context, b); //Set permissions on the derivative bitstream - //- First remove any existing policies - authorizeService.removeAllPolicies(context, b); - - //- Determine if this is a public-derivative format - if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) { - //- Set derivative bitstream to be publicly accessible - Group anonymous = groupService.findByName(context, Group.ANONYMOUS); - authorizeService.addPolicy(context, b, Constants.READ, anonymous); - } else { - //- replace the policies using the same in the source bitstream - authorizeService.replaceAllPolicies(context, source, b); - } + updatePoliciesOfDerivativeBitstream(context, b, formatFilter, source); //do post-processing of the generated bitstream formatFilter.postProcessBitstream(context, item, b); @@ -421,6 +414,71 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo return true; } + @Override + public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source) + throws SQLException, AuthorizeException { + + if (filterClasses == null) { + return; + } + + for (FormatFilter formatFilter : filterClasses) { + for (Bitstream bitstream : findDerivativeBitstreams(item, source, formatFilter)) { + updatePoliciesOfDerivativeBitstream(context, bitstream, formatFilter, source); + } + } + } + + /** + * find derivative bitstreams related to source bitstream + * + * @param item item containing bitstreams + * @param source source bitstream + * @param formatFilter formatFilter + * @return list of derivative bitstreams from source bitstream + * @throws SQLException If something goes wrong in the database + */ + private List findDerivativeBitstreams(Item item, Bitstream source, FormatFilter formatFilter) + throws 
SQLException { + + String bitstreamName = formatFilter.getFilteredName(source.getName()); + List bundles = itemService.getBundles(item, formatFilter.getBundleName()); + + return bundles.stream() + .flatMap(bundle -> + bundle.getBitstreams().stream()) + .filter(bitstream -> + StringUtils.equals(bitstream.getName().trim(), bitstreamName.trim())) + .collect(Collectors.toList()); + } + + /** + * update resource polices of derivative bitstreams. + * by remove all resource policies and + * set derivative bitstreams to be publicly accessible or + * replace derivative bitstreams policies using + * the same in the source bitstream. + * + * @param context the context + * @param bitstream derivative bitstream + * @param formatFilter formatFilter + * @param source the source bitstream + * @throws SQLException If something goes wrong in the database + * @throws AuthorizeException if authorization error + */ + private void updatePoliciesOfDerivativeBitstream(Context context, Bitstream bitstream, FormatFilter formatFilter, + Bitstream source) throws SQLException, AuthorizeException { + + authorizeService.removeAllPolicies(context, bitstream); + + if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) { + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + authorizeService.addPolicy(context, bitstream, Constants.READ, anonymous); + } else { + authorizeService.replaceAllPolicies(context, source, bitstream); + } + } + @Override public Item getCurrentItem() { return currentItem; diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java index 50a6bb3a2027..bc92ff521098 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java @@ -7,10 +7,12 @@ */ package org.dspace.app.mediafilter.service; +import java.sql.SQLException; import java.util.List; import java.util.Map; import org.dspace.app.mediafilter.FormatFilter; +import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -91,6 +93,22 @@ public void applyFiltersCollection(Context context, Collection collection) public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter) throws Exception; + /** + * update resource polices of derivative bitstreams + * related to source bitstream. + * set derivative bitstreams to be publicly accessible or + * replace derivative bitstreams policies using + * the same in the source bitstream. + * + * @param context context + * @param item item containing bitstreams + * @param source source bitstream + * @throws SQLException If something goes wrong in the database + * @throws AuthorizeException if authorization error + */ + public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source) + throws SQLException, AuthorizeException; + /** * Return the item that is currently being processed/filtered * by the MediaFilterManager. 
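The new updatePoliciesOfDerivativeBitstreams method above is the hook BulkAccessControl uses to keep thumbnails and extracted-text bitstreams in step with their source: derivatives produced by a "public" filter stay readable by the Anonymous group, while all other derivatives receive a copy of the source bitstream's policies. A minimal caller sketch follows; the class name is hypothetical and the MediaFilterService is assumed to be injected or obtained from its factory, only the method call itself comes from the patch:

    import java.sql.SQLException;

    import org.dspace.app.mediafilter.service.MediaFilterService;
    import org.dspace.authorize.AuthorizeException;
    import org.dspace.content.Bitstream;
    import org.dspace.content.Item;
    import org.dspace.core.Context;

    /** Sketch: re-align derivative bitstream policies after the source bitstream's policies change. */
    public class DerivativePolicySyncSketch {

        private final MediaFilterService mediaFilterService;

        public DerivativePolicySyncSketch(MediaFilterService mediaFilterService) {
            this.mediaFilterService = mediaFilterService;
        }

        public void syncDerivatives(Context context, Item item, Bitstream source)
                throws SQLException, AuthorizeException {
            // finds the derivative bitstreams generated for `source` and rewrites their
            // READ policies (public for public filters, copied from `source` otherwise)
            mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, source);
        }
    }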
diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index 02d83808c1ff..b6bf7aa5cfa2 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -508,18 +508,19 @@ public void adjustBundleBitstreamPolicies(Context context, Item item, Collection throws SQLException, AuthorizeException; /** - * Adjust the Bundle and Bitstream policies to reflect what have been defined + * Adjust the Bitstream policies to reflect what have been defined * during the submission/workflow. The temporary SUBMISSION and WORKFLOW * policies are removed and the policies defined at the item and collection * level are copied and inherited as appropriate. Custom selected Item policies - * are copied to the bundle/bitstream only if no explicit custom policies were - * already applied to the bundle/bitstream. Collection's policies are inherited + * are copied to the bitstream only if no explicit custom policies were + * already applied to the bitstream. Collection's policies are inherited * if there are no other policies defined or if the append mode is defined by * the configuration via the core.authorization.installitem.inheritance-read.append-mode property * * @param context DSpace context object * @param item Item to adjust policies on * @param collection Collection + * @param bitstream Bitstream to adjust policies on * @throws SQLException If database error * @throws AuthorizeException If authorization error */ diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java index c1d6a0db64ca..6335ecd417f7 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -9,13 +9,16 @@ import static org.dspace.app.matcher.ResourcePolicyMatcher.matches; import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED; import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME; +import static org.dspace.core.Constants.DEFAULT_BUNDLE_NAME; import static org.dspace.core.Constants.READ; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -26,30 +29,39 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.UUID; import java.util.stream.Collectors; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; import org.apache.commons.io.file.PathUtils; +import org.apache.commons.lang3.ArrayUtils; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.mediafilter.FormatFilter; +import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory; +import org.dspace.app.mediafilter.service.MediaFilterService; import 
org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; import org.dspace.authorize.ResourcePolicy; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.GroupBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.ItemService; import org.dspace.core.Constants; +import org.dspace.core.SelfNamedPlugin; +import org.dspace.core.factory.CoreServiceFactory; import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchServiceException; @@ -58,6 +70,8 @@ import org.dspace.eperson.Group; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.GroupService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -68,12 +82,22 @@ * @author Mohamed Eskander (mohamed.eskander at 4science.it) */ public class BulkAccessControlIT extends AbstractIntegrationTestWithDatabase { + + //key (in dspace.cfg) which lists all enabled filters by name + private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; + + //prefix (in dspace.cfg) for all filter properties + private static final String FILTER_PREFIX = "filter"; + + //suffix (in dspace.cfg) for input formats supported by each filter + private static final String INPUT_FORMATS_SUFFIX = "inputFormats"; + private Path tempDir; private String tempFilePath; - private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); private SearchService searchService = SearchUtils.getSearchService(); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @Before @Override @@ -92,12 +116,300 @@ public void destroy() throws Exception { super.destroy(); } + @Test + public void performBulkAccessWithAnonymousEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("An eperson to do the 
the Bulk Access Control must be specified") + )); + } + + @Test + public void performBulkAccessWithNotExistingEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + String randomUUID = UUID.randomUUID().toString(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", randomUUID}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("EPerson cannot be found: " + randomUUID) + )); + } + + @Test + public void performBulkAccessWithNotAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithCommunityAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", community.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + 
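+        // the CLI form of the script requires "-e": the run is authorized as that eperson
+        // (here the community admin created above, so no authorization error is expected)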
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(0)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithCollectionAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", collection.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(0)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithItemAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(0)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithNotCollectionAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + // add eperson to admin group + Collection collectionOne = CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + ItemBuilder.createItem(context, collectionOne).build(); + ItemBuilder.createItem(context, collectionTwo).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + 
+ " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", + "-u", collectionOne.getID().toString(), + "-u", collectionTwo.getID().toString(), + "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + @Test public void performBulkAccessWithoutRequiredParamTest() throws Exception { buildJsonFile(""); - String[] args = new String[] {"bulk-access-control", "-f", tempFilePath}; + String[] args = new String[] {"bulk-access-control", "-f", tempFilePath, "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); @@ -128,7 +440,8 @@ public void performBulkAccessWithEmptyJsonTest() throws Exception { buildJsonFile(""); - String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); @@ -168,7 +481,8 @@ public void performBulkAccessWithWrongModeOfItemValueTest() throws Exception { buildJsonFile(json); - String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); @@ -179,8 +493,25 @@ public void performBulkAccessWithWrongModeOfItemValueTest() throws Exception { assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( containsString("wrong value for item mode") )); + } + + @Test + public void performBulkAccessWithMissingModeOfItemValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); - json = "{ \"item\": {\n" + + String json = "{ \"item\": {\n" + " \"accessConditions\": [\n" + " {\n" + " \"name\": \"openaccess\"\n" + @@ -190,9 +521,10 @@ public void performBulkAccessWithWrongModeOfItemValueTest() throws Exception { buildJsonFile(json); - args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; - testDSpaceRunnableHandler = 
new TestDSpaceRunnableHandler(); + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); @@ -230,7 +562,8 @@ public void performBulkAccessWithWrongModeOfBitstreamValueTest() throws Exceptio buildJsonFile(json); - String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); @@ -241,8 +574,25 @@ public void performBulkAccessWithWrongModeOfBitstreamValueTest() throws Exceptio assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( containsString("wrong value for bitstream mode") )); + } - json = "{ \"bitstream\": {\n" + + @Test + public void performBulkAccessWithMissingModeOfBitstreamValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"bitstream\": {\n" + " \"accessConditions\": [\n" + " {\n" + " \"name\": \"openaccess\"\n" + @@ -252,9 +602,10 @@ public void performBulkAccessWithWrongModeOfBitstreamValueTest() throws Exceptio buildJsonFile(json); - args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; - testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); @@ -291,7 +642,8 @@ public void performBulkAccessWithNotFoundAccessConditionNameTest() throws Except buildJsonFile(json); - String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); @@ -305,7 +657,7 @@ public void performBulkAccessWithNotFoundAccessConditionNameTest() throws Except } @Test - public void performBulkAccessWithInvalidAccessConditionDateTest() throws Exception { + public void performBulkAccessWithInvalidEmbargoAccessConditionDateTest() throws Exception { context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context) @@ -332,7 +684,8 @@ public void performBulkAccessWithInvalidAccessConditionDateTest() throws Excepti buildJsonFile(jsonOne); - String[] args = new String[] {"bulk-access-control", "-u", 
item.getID().toString(), "-f", tempFilePath}; + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); @@ -343,8 +696,25 @@ public void performBulkAccessWithInvalidAccessConditionDateTest() throws Excepti assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( containsString("invalid access condition, The access condition embargo requires a start date.") )); + } + + @Test + public void performBulkAccessWithInvalidLeaseAccessConditionDateTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); - String jsonTwo = "{ \"item\": {\n" + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + " \"mode\": \"add\",\n" + " \"accessConditions\": [\n" + " {\n" + @@ -354,11 +724,12 @@ public void performBulkAccessWithInvalidAccessConditionDateTest() throws Excepti " ]\n" + " }}\n"; - buildJsonFile(jsonTwo); + buildJsonFile(json); - args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; - testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); @@ -398,7 +769,8 @@ public void performBulkAccessForCommunityItemsWithBitstreamConstraintsTest() thr String[] args = new String[] {"bulk-access-control", "-u", communityOne.getID().toString(), - "-f", tempFilePath}; + "-f", tempFilePath, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); @@ -444,7 +816,8 @@ public void performBulkAccessForMultipleItemsWithBitstreamConstraintsTest() thro new String[] {"bulk-access-control", "-u", communityOne.getID().toString(), "-u", communityTwo.getID().toString(), - "-f", tempFilePath}; + "-f", tempFilePath, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); @@ -511,13 +884,19 @@ public void performBulkAccessForSingleItemWithBitstreamConstraintsTest() throws String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), - "-f", tempFilePath}; + "-f", tempFilePath, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + 
assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(1)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItem( + containsString("Replacing Bitstream {" + bitstreamOne.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}"))); bitstreamOne = context.reloadEntity(bitstreamOne); bitstreamTwo = context.reloadEntity(bitstreamTwo); @@ -553,7 +932,8 @@ public void performBulkAccessWithAddModeAndEmptyAccessConditionsTest() throws Ex buildJsonFile(jsonOne); String[] args = - new String[] {"bulk-access-control", "-u", parentCommunity.getID().toString(), "-f", tempFilePath}; + new String[] {"bulk-access-control", "-u", parentCommunity.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); @@ -624,7 +1004,8 @@ public void performBulkAccessWithValidJsonTest() throws Exception { "-u", subCommunityOne.getID().toString(), "-u", collectionTwo.getID().toString(), "-u", itemThree.getID().toString(), - "-f", tempFilePath + "-f", tempFilePath, + "-e", admin.getEmail() }; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); @@ -632,6 +1013,16 @@ public void performBulkAccessWithValidJsonTest() throws Exception { assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(3)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), containsInAnyOrder( + containsString("Replacing Item {" + itemOne.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}"), + containsString("Replacing Item {" + itemTwo.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}"), + containsString("Replacing Item {" + itemThree.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}") + )); itemOne = context.reloadEntity(itemOne); itemTwo = context.reloadEntity(itemTwo); @@ -726,7 +1117,8 @@ public void performBulkAccessWithReplaceModeAndEmptyAccessConditionsTest() throw "bulk-access-control", "-u", subCommunityOne.getID().toString(), "-u", collectionTwo.getID().toString(), - "-f", tempFilePath + "-f", tempFilePath, + "-e", admin.getEmail() }; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); @@ -734,6 +1126,7 @@ public void performBulkAccessWithReplaceModeAndEmptyAccessConditionsTest() throw assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(60)); List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); List itemsOfSubCommTwo = findItems("location.comm:" + subCommunityTwo.getID()); @@ -762,6 +1155,11 @@ public void performBulkAccessWithReplaceModeAndEmptyAccessConditionsTest() throw matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) )); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItems( + containsString("Cleaning Item {" + item.getID() + "} policies"), + containsString("Inheriting policies from owning Collection in Item {" + item.getID() + "") + )); + List bitstreams = findAllBitstreams(item); for (Bitstream bitstream : bitstreams) { @@ -769,6 +1167,11 @@ public void 
performBulkAccessWithReplaceModeAndEmptyAccessConditionsTest() throw assertThat(bitstream.getResourcePolicies(), hasItem( matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItems( + containsString("Cleaning Bitstream {" + bitstream.getID() + "} policies"), + containsString("Inheriting policies from owning Collection in Bitstream {" + bitstream.getID() + "") + )); } } } @@ -840,7 +1243,8 @@ public void performBulkAccessWithAddModeTest() throws Exception { String[] args = new String[] { "bulk-access-control", "-u", subCommunityOne.getID().toString(), - "-f", tempFilePath + "-f", tempFilePath, + "-e", admin.getEmail() }; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); @@ -848,6 +1252,7 @@ public void performBulkAccessWithAddModeTest() throws Exception { assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(10)); List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); @@ -945,7 +1350,8 @@ public void performBulkAccessWithReplaceModeTest() throws Exception { String[] args = new String[] { "bulk-access-control", "-u", subCommunityOne.getID().toString(), - "-f", tempFilePath + "-f", tempFilePath, + "-e", admin.getEmail() }; TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); @@ -953,6 +1359,7 @@ public void performBulkAccessWithReplaceModeTest() throws Exception { assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(6)); List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); @@ -969,6 +1376,11 @@ public void performBulkAccessWithReplaceModeTest() throws Exception { matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) )); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItem( + containsString("Replacing Item {" + item.getID() + + "} policy to access conditions:{openaccess, embargo, start_date=2024-06-24}") + )); + List bitstreams = findAllBitstreams(item); for (Bitstream bitstream : bitstreams) { @@ -977,10 +1389,246 @@ public void performBulkAccessWithReplaceModeTest() throws Exception { matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItem( + containsString("Replacing Bitstream {" + bitstream.getID() + + "} policy to access conditions:{openaccess, lease, end_date=2023-06-24}") + )); + } + } + } + + @Test + public void performBulkAccessAndCheckDerivativeBitstreamsPoliciesTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + Item item = ItemBuilder.createItem(context, collectionOne).build(); + + Bundle bundle = 
BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bitstream") + .withFormat("TEXT") + .withMimeType("text/plain") + .build(); + } + + List formatFilters = new ArrayList<>(); + Map> filterFormats = new HashMap<>(); + MediaFilterService mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService(); + + String[] filterNames = + DSpaceServicesFactory.getInstance() + .getConfigurationService() + .getArrayProperty(MEDIA_FILTER_PLUGINS_KEY); + + + for (int i = 0; i < filterNames.length; i++) { + + //get filter of this name & add to list of filters + FormatFilter filter = + (FormatFilter) CoreServiceFactory.getInstance() + .getPluginService() + .getNamedPlugin(FormatFilter.class, filterNames[i]); + formatFilters.add(filter); + + String filterClassName = filter.getClass().getName(); + + String pluginName = null; + + if (SelfNamedPlugin.class.isAssignableFrom(filter.getClass())) { + //Get the plugin instance name for this class + pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName(); } + + String[] formats = + DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty( + FILTER_PREFIX + "." + filterClassName + + (pluginName != null ? "." + pluginName : "") + + "." + INPUT_FORMATS_SUFFIX); + + //add to internal map of filters to supported formats + if (ArrayUtils.isNotEmpty(formats)) { + filterFormats.put(filterClassName + + (pluginName != null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR + + pluginName : ""), + Arrays.asList(formats)); + } + } + + mediaFilterService.setFilterClasses(formatFilters); + mediaFilterService.setFilterFormats(filterFormats); + + // here will create derivative bitstreams + mediaFilterService.applyFiltersItem(context, item); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"endDate\": \"2023-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(2)); + + item = context.reloadEntity(item); + + Bundle originalBundle = item.getBundles(DEFAULT_BUNDLE_NAME).get(0); + Bundle textBundle = item.getBundles("TEXT").get(0); + + assertThat(item.getResourcePolicies(), hasSize(2)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, 
"2024-06-24", null, null) + )); + + assertThat(originalBundle.getBitstreams().get(0).getResourcePolicies(), hasSize(2)); + assertThat(originalBundle.getBitstreams().get(0).getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + + assertThat(textBundle.getBitstreams().get(0).getResourcePolicies(), hasSize(2)); + assertThat(textBundle.getBitstreams().get(0).getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + } + + @Test + public void performBulkAccessWithReplaceModeAndAppendModeIsEnabledTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context).withName("special network").build(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection one") + .withDefaultItemRead(group) + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", item.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + try { + configurationService.setProperty("core.authorization.installitem.inheritance-read.append-mode", true); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), + testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(2)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), containsInAnyOrder( + containsString("Replacing Item {" + item.getID() + "} policy to access conditions:" + + "{embargo, start_date=2024-06-24}"), + containsString("Inheriting policies from owning Collection in Item {" + item.getID() + "}") + )); + + item = context.reloadEntity(item); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(item.getResourcePolicies(), hasSize(2)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null), + matches(Constants.READ, group, TYPE_INHERITED) + )); + } finally { + configurationService.setProperty("core.authorization.installitem.inheritance-read.append-mode", false); } } + @Test + public void performBulkAccessWithHelpParamTest() throws Exception { + + String[] args = new String[] {"bulk-access-control", "-h"}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + 
private List findItems(String query) throws SearchServiceException { DiscoverQuery discoverQuery = new DiscoverQuery(); diff --git a/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java index 3e8a7dc9f0f5..f287c7aa8d32 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java @@ -7,6 +7,8 @@ */ package org.dspace.builder; +import static org.dspace.core.Constants.DEFAULT_ITEM_READ; + import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -15,6 +17,7 @@ import org.apache.commons.io.IOUtils; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.MetadataSchemaEnum; @@ -237,6 +240,28 @@ public CollectionBuilder withAdminGroup(EPerson... members) throws SQLException, return this; } + /** + * remove the resource policies with type DEFAULT_ITEM_READ and + * add new policy with type DEFAULT_ITEM_READ of + * the new group to current collection. + * + * @param group the group + * @return this builder + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + */ + public CollectionBuilder withDefaultItemRead(Group group) throws SQLException, AuthorizeException { + resourcePolicyService.removePolicies(context, collection, DEFAULT_ITEM_READ); + + ResourcePolicy resourcePolicy = resourcePolicyService.create(context); + resourcePolicy.setGroup(group); + resourcePolicy.setAction(DEFAULT_ITEM_READ); + resourcePolicy.setdSpaceObject(collection); + resourcePolicyService.update(context, resourcePolicy); + return this; + } + + @Override public Collection build() { try { From e6daf49f25ece0a46b9acf5c32e703d778e2e7f1 Mon Sep 17 00:00:00 2001 From: Mohamed Saber Eskander Date: Wed, 31 May 2023 13:26:28 +0300 Subject: [PATCH 251/686] added missed configuration --- .../test/data/dspaceFolder/config/spring/rest/scripts.xml | 5 +++++ dspace/config/spring/rest/scripts.xml | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml index fd218aa77a8d..79927833d26e 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml @@ -43,4 +43,9 @@
+ + + + + diff --git a/dspace/config/spring/rest/scripts.xml b/dspace/config/spring/rest/scripts.xml index 0fae31d552c1..eda8c579a89c 100644 --- a/dspace/config/spring/rest/scripts.xml +++ b/dspace/config/spring/rest/scripts.xml @@ -64,4 +64,9 @@
+ + + + + From e31a2238824e79a2b60bf2cb1902352f5e3b9845 Mon Sep 17 00:00:00 2001 From: damian Date: Wed, 31 May 2023 13:53:09 +0200 Subject: [PATCH 252/686] Item counts are now managable. --- .../java/org/dspace/browse/ItemCounter.java | 20 +++++++++++++------ dspace/config/dspace.cfg | 4 ++++ 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java index 27e29ad9fa1c..da88b0657ff8 100644 --- a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java +++ b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java @@ -55,6 +55,9 @@ public class ItemCounter { protected ItemService itemService; protected ConfigurationService configurationService; + private boolean showStrengths; + private boolean useCache; + /** * Construct a new item counter which will use the given DSpace Context * @@ -66,6 +69,8 @@ public ItemCounter(Context context) throws ItemCountException { this.dao = ItemCountDAOFactory.getInstance(this.context); this.itemService = ContentServiceFactory.getInstance().getItemService(); this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.showStrengths = configurationService.getBooleanProperty("webui.strengths.show", true); + this.useCache = configurationService.getBooleanProperty("webui.strengths.cache", true); } public static ItemCounter getInstance() throws ItemCountException { @@ -76,18 +81,21 @@ public static ItemCounter getInstance() throws ItemCountException { } /** - * Get the count of the items in the given container. If the configuration - * value webui.strengths.cache is equal to 'true' this will return the - * cached value if it exists. If it is equal to 'false' it will count - * the number of items in the container in real time. + * Get the count of the items in the given container. If the configuration + * value webui.strengths.show is equal to 'true' this method will return all + * archived items. If the configuration value webui.strengths.cache + * is equal to 'true' this will return the cached value if it exists. + * If it is equal to 'false' it will count the number of items + * in the container in real time. * * @param dso DSpaceObject * @return count * @throws ItemCountException when error occurs */ public int getCount(DSpaceObject dso) throws ItemCountException { - boolean useCache = configurationService.getBooleanProperty( - "webui.strengths.cache", true); + if (!showStrengths) { + return 0; + } if (useCache) { return dao.getCount(dso); diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 65b1f951fad3..2e7ad1636bce 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1066,6 +1066,10 @@ webui.preview.brand.fontpoint = 12 ##### Settings for item count (strength) information #### +# Whether to display collection and community strengths (i.e. item counts) +#The default behaviour is to show item counts. +#webui.strengths.show = true + # Counts fetched in real time will perform an actual count of the # index contents every time a page with this feature is requested, # which may not scale as well as a cached count. 
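Illustration only, not taken from any patch above: a minimal sketch of how the two webui.strengths.* properties drive the reworked ItemCounter#getCount. The ItemCounter constructor, the getCount signature and the property names come from the diff in this patch; the wrapper class and method below are hypothetical.

// Hypothetical helper, not part of the patches: shows the flag handling added to ItemCounter.
import org.dspace.browse.ItemCountException;
import org.dspace.browse.ItemCounter;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;

public class StrengthExample {

    /** Returns the "strength" (item count) shown next to a community or collection. */
    public static int strengthOf(Context context, DSpaceObject container) throws ItemCountException {
        // The constructor now reads webui.strengths.show and webui.strengths.cache once, up front.
        ItemCounter counter = new ItemCounter(context);

        // webui.strengths.show = false                -> getCount() returns 0 without querying anything
        // webui.strengths.show = true, cache = true   -> cached count from ItemCountDAO
        // webui.strengths.show = true, cache = false  -> archived items are counted in real time
        return counter.getCount(container);
    }
}

Reading both properties once in the constructor, instead of looking up webui.strengths.cache on every call as the old code did, keeps the per-request path to two boolean checks.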
From 99d4a8447ebaa3d6d641b628bc041a3fb07cb17e Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Wed, 31 May 2023 16:52:04 +0200 Subject: [PATCH 253/686] 102052: Add remaining CRUD methods to BundlePrimaryBitstreamLinkRepository --- .../BundlePrimaryBitstreamLinkRepository.java | 100 ++++++++++++++++++ 1 file changed, 100 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java index ed580a21b746..c2f6dac5f228 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java @@ -12,9 +12,12 @@ import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; +import org.dspace.app.rest.exception.DSpaceBadRequestException; +import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.BitstreamRest; import org.dspace.app.rest.model.BundleRest; import org.dspace.app.rest.projection.Projection; +import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.service.BundleService; import org.dspace.core.Context; @@ -34,6 +37,15 @@ public class BundlePrimaryBitstreamLinkRepository extends AbstractDSpaceRestRepo @Autowired BundleService bundleService; + /** + * Retrieves the primaryBitstream of a Bundle. + * + * @param request The HttpServletRequest if relevant + * @param bundleId The UUID of the Bundle + * @param optionalPageable The pageable if relevant + * @param projection The projection to use + * @return The primaryBitstream, or null if not found + */ @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'READ')") public BitstreamRest getPrimaryBitstream(@Nullable HttpServletRequest request, UUID bundleId, @@ -53,4 +65,92 @@ public BitstreamRest getPrimaryBitstream(@Nullable HttpServletRequest request, throw new RuntimeException(e); } } + + /** + * Sets a primaryBitstream on a Bundle. + * + * @param context The current DSpace context + * @param bundleId The UUID of the Bundle + * @param bitstream The Bitstream to use as primaryBitstream + * @param projection The projection to use + * @return The Bundle + */ + @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'WRITE')") + public BundleRest createPrimaryBitstream(Context context, UUID bundleId, + Bitstream bitstream, Projection projection) { + try { + Bundle bundle = setPrimaryBitstream(context, bundleId, bitstream, true); + return converter.toRest(context.reloadEntity(bundle), projection); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + /** + * Updates a primaryBitstream on a Bundle. + * + * @param context The current DSpace context + * @param bundleId The UUID of the Bundle + * @param bitstream The Bitstream to use as primaryBitstream + * @param projection The projection to use + * @return The Bundle + */ + @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'WRITE')") + public BundleRest updatePrimaryBitstream(Context context, UUID bundleId, + Bitstream bitstream, Projection projection) { + try { + Bundle bundle = setPrimaryBitstream(context, bundleId, bitstream, false); + return converter.toRest(context.reloadEntity(bundle), projection); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + /** + * Deletes the primaryBitstream on a Bundle. 
+ * + * @param context The current DSpace context + * @param bundleId The UUID of the Bundle + */ + @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'DELETE')") + public void deletePrimaryBitstream(Context context, UUID bundleId) { + try { + Bundle bundle = setPrimaryBitstream(context, bundleId, null, false); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + /** + * Internal method to set the primaryBitstream on a Bundle. + * + * @param context The current DSpace context + * @param bundleId The UUID of the Bundle + * @param bitstream The Bitstream to use as primaryBitstream + * @param shouldBeSet Whether a primaryBitstream is should already be set + * @return The Bundle + */ + private Bundle setPrimaryBitstream(Context context, UUID bundleId, Bitstream bitstream, boolean shouldBeSet) + throws SQLException { + Bundle bundle = bundleService.find(context, bundleId); + if (bundle == null) { + throw new ResourceNotFoundException("No such bundle: " + bundleId); + } + if (!shouldBeSet && bundle.getPrimaryBitstream() == null) { + throw new DSpaceBadRequestException("Bundle '" + bundle.getName() + + "' does not have a primary bitstream."); + } + if (shouldBeSet && bundle.getPrimaryBitstream() != null) { + throw new DSpaceBadRequestException("Bundle '" + bundle.getName() + + "' already has a primary bitstream."); + } + if (bitstream != null && !bundle.getBitstreams().contains(bitstream)) { + throw new UnprocessableEntityException("Bundle '" + bundle.getName() + "' does not contain " + + "bitstream with id: " + bitstream.getID()); + } + + bundle.setPrimaryBitstreamID(bitstream); + context.commit(); + return bundle; + } } From 2c43270152825a775388ff2c3a9f309ce609d942 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Wed, 31 May 2023 16:53:45 +0200 Subject: [PATCH 254/686] 102052: Create PrimaryBitstreamController for primaryBitstream endpoint + ITs --- .../app/rest/PrimaryBitstreamController.java | 157 ++++++++++ .../rest/PrimaryBitstreamControllerIT.java | 296 ++++++++++++++++++ 2 files changed, 453 insertions(+) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java new file mode 100644 index 000000000000..cdcbbce7476e --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java @@ -0,0 +1,157 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID; +import static org.dspace.core.Constants.BITSTREAM; + +import java.util.List; +import java.util.UUID; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.exception.UnprocessableEntityException; +import org.dspace.app.rest.model.BitstreamRest; +import org.dspace.app.rest.model.BundleRest; +import org.dspace.app.rest.model.hateoas.BitstreamResource; +import org.dspace.app.rest.model.hateoas.BundleResource; +import 
org.dspace.app.rest.repository.BundlePrimaryBitstreamLinkRepository; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.app.rest.utils.Utils; +import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * This RestController is responsible for managing primaryBitstreams on bundles. + * The endpoint can be found at /api/core/bundles/{bundle-uuid}/primaryBitstream + */ +@RestController +@RequestMapping("/api/" + BundleRest.CATEGORY + "/" + BundleRest.PLURAL_NAME + + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/" + BundleRest.PRIMARY_BITSTREAM) +public class PrimaryBitstreamController { + + @Autowired + private BundlePrimaryBitstreamLinkRepository repository; + @Autowired + private ConverterService converter; + @Autowired + private Utils utils; + + /** + * This method retrieves a primaryBitstream on the given Bundle. + * Returns null if Bundle doesn't have a primaryBitstream. + *
+ * curl -X GET "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" + * + * + * @param uuid The UUID of the Bundle of which the primaryBitstream will be retrieved + * @param request The HttpServletRequest + * @return The primaryBitstream, or null if not found + */ + @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'READ')") + @RequestMapping(method = RequestMethod.GET) + public BitstreamResource getPrimaryBitstream(@PathVariable UUID uuid, + HttpServletRequest request) { + BitstreamRest bitstreamRest = repository.getPrimaryBitstream(null, uuid, null, utils.obtainProjection()); + return converter.toResource(bitstreamRest); + } + + /** + * This method creates a primaryBitstream on the given Bundle. + *
+ * curl -i -X POST "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" + * -H "Content-type:text/uri-list" + * -d "https://{dspace.server.url}/api/core/bitstreams/{bitstream-uuid}" + * + * + * @param uuid The UUID of the Bundle on which the primaryBitstream will be set + * @param request The HttpServletRequest + * @return The Bundle on which the primaryBitstream was set + */ + @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'WRITE')") + @RequestMapping(method = RequestMethod.POST, consumes = {"text/uri-list"}) + public ResponseEntity> createPrimaryBitstream(@PathVariable UUID uuid, + HttpServletRequest request) { + Context context = ContextUtil.obtainContext(request); + BundleRest bundleRest = repository.createPrimaryBitstream(context, uuid, + getBitstreamFromRequest(context, request), + utils.obtainProjection()); + return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), + (RepresentationModel) converter.toResource(bundleRest)); + } + + /** + * This method updates the primaryBitstream on the given Bundle. + *
+ * curl -i -X PUT "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" + * -H "Content-type:text/uri-list" + * -d "https://{dspace.server.url}/api/core/bitstreams/{bitstream-uuid}" + * + * + * @param uuid The UUID of the Bundle of which the primaryBitstream will be updated + * @param request The HttpServletRequest + * @return The Bundle of which the primaryBitstream was updated + */ + @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'WRITE')") + @RequestMapping(method = RequestMethod.PUT, consumes = {"text/uri-list"}) + public BundleResource updatePrimaryBitstream(@PathVariable UUID uuid, + HttpServletRequest request) { + Context context = ContextUtil.obtainContext(request); + BundleRest bundleRest = repository.updatePrimaryBitstream(context, uuid, + getBitstreamFromRequest(context, request), + utils.obtainProjection()); + return converter.toResource(bundleRest); + } + + /** + * This method deletes the primaryBitstream on the given Bundle. + *
+ * curl -i -X DELETE "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" + * + * + * @param uuid The UUID of the Bundle of which the primaryBitstream will be deleted + * @param request The HttpServletRequest + * @return The Bundle of which the primaryBitstream was deleted + */ + @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'DELETE')") + @RequestMapping(method = RequestMethod.DELETE) + public ResponseEntity> deletePrimaryBitstream(@PathVariable UUID uuid, + HttpServletRequest request) { + Context context = ContextUtil.obtainContext(request); + repository.deletePrimaryBitstream(context, uuid); + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + + /** + * This method parses a URI from the request body and resolves it to a Bitstream. + * + * @param context The current DSpace context + * @param request The HttpServletRequest + * @return The resolved Bitstream + */ + private Bitstream getBitstreamFromRequest(Context context, HttpServletRequest request) { + List dsoList = utils.constructDSpaceObjectList(context, utils.getStringListFromRequest(request)); + if (dsoList.size() != 1 || dsoList.get(0).getType() != BITSTREAM) { + throw new UnprocessableEntityException("No bitstream has been specified " + + "or the data cannot be resolved to a bitstream."); + } + return (Bitstream) dsoList.get(0); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java new file mode 100644 index 000000000000..630ef257deaa --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java @@ -0,0 +1,296 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.InputStream; +import java.util.UUID; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; +import org.dspace.app.rest.matcher.BitstreamMatcher; +import org.dspace.app.rest.matcher.BundleMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.BundleBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Tests for the PrimaryBitstreamController + */ +public class 
PrimaryBitstreamControllerIT extends AbstractControllerIntegrationTest { + + @Autowired + BundleService bundleService; + @Autowired + BitstreamService bitstreamService; + + Item item; + Bitstream bitstream; + Bundle bundle; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + item = ItemBuilder.createItem(context, collection).build(); + + // create bitstream in ORIGINAL bundle of item + String bitstreamContent = "TEST CONTENT"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + bundle = item.getBundles("ORIGINAL").get(0); + context.restoreAuthSystemState(); + } + + @Test + public void testGetPrimaryBitstream() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get(getBundleUrl(bundle.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BitstreamMatcher.matchProperties(bitstream))); + } + + @Test + public void testGetPrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get(getBundleUrl(UUID.randomUUID()))) + .andExpect(status().isNotFound()); + } + + @Test + public void testGetPrimaryBitstreamNonExisting() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get(getBundleUrl(bundle.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$").doesNotExist()); + } + + @Test + public void testPostPrimaryBitstream() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle.getName(), bundle.getID(), + bundle.getHandle(), bundle.getType()))); + // verify primaryBitstream was actually added + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundleUrl(UUID.randomUUID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isNotFound()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamInvalidBitstream() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamAlreadyExists() throws Exception { + context.turnOffAuthorisationSystem(); + 
bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createSecondBitstream(bundle); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isBadRequest()); + // verify primaryBitstream is still the original one + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamNotInBundle() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle2 = BundleBuilder.createBundle(context, item).withName("Bundle2").build(); + Bitstream bitstream2 = createSecondBitstream(bundle2); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createSecondBitstream(bundle); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle.getName(), bundle.getID(), + bundle.getHandle(), bundle.getType()))); + // verify primaryBitstream was actually updated + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream2, bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundleUrl(UUID.randomUUID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isNotFound()); + } + + @Test + public void testUpdatePrimaryBitstreamInvalidBitstream() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still the original one + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamNonExisting() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isBadRequest()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamNotInBundle() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bundle bundle2 = 
BundleBuilder.createBundle(context, item).withName("Bundle2").build(); + Bitstream bitstream2 = createSecondBitstream(bundle2); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still the original one + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testDeletePrimaryBitstream() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(delete(getBundleUrl(bundle.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle.getBitstreams().size()); + Assert.assertEquals(bitstream, bundle.getBitstreams().get(0)); + } + + @Test + public void testDeletePrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(delete(getBundleUrl(UUID.randomUUID()))) + .andExpect(status().isNotFound()); + } + + @Test + public void testDeletePrimaryBitstreamBundleNonExisting() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(delete(getBundleUrl(bundle.getID()))) + .andExpect(status().isBadRequest()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + private String getBundleUrl(UUID uuid) { + return "/api/core/bundles/" + uuid + "/primaryBitstream"; + } + + private String getBitstreamUrl(UUID uuid) { + return "/api/core/bitstreams/" + uuid; + } + + private Bitstream createSecondBitstream(Bundle bundle) throws Exception { + String bitstreamContent = "Second Bitstream"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + return BitstreamBuilder.createBitstream(context, bundle, is) + .withName("Bitstream2") + .withMimeType("text/plain") + .build(); + } + } +} From b272b1fcab1f5340c35563c5840dadd01d4a7d33 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Wed, 31 May 2023 17:37:34 -0400 Subject: [PATCH 255/686] Make service lookup retry log at DEBUG; radically shorten the trace. 
--- .../servicemanager/DSpaceServiceManager.java | 19 +++++--- .../java/org/dspace/utils/CallStackUtils.java | 44 +++++++++++++++++++ 2 files changed, 56 insertions(+), 7 deletions(-) create mode 100644 dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java diff --git a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java index afd1627f5ee3..6cffa7ee66d5 100644 --- a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java +++ b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java @@ -7,6 +7,8 @@ */ package org.dspace.servicemanager; +import static org.apache.logging.log4j.Level.DEBUG; + import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; @@ -21,6 +23,8 @@ import javax.annotation.PreDestroy; import org.apache.commons.lang3.ArrayUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.kernel.Activator; import org.dspace.kernel.config.SpringLoader; import org.dspace.kernel.mixins.ConfigChangeListener; @@ -28,8 +32,7 @@ import org.dspace.kernel.mixins.ServiceManagerReadyAware; import org.dspace.servicemanager.config.DSpaceConfigurationService; import org.dspace.servicemanager.spring.DSpaceBeanFactoryPostProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.dspace.utils.CallStackUtils; import org.springframework.beans.BeansException; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.beans.factory.NoSuchBeanDefinitionException; @@ -44,7 +47,7 @@ */ public final class DSpaceServiceManager implements ServiceManagerSystem { - private static Logger log = LoggerFactory.getLogger(DSpaceServiceManager.class); + private static Logger log = LogManager.getLogger(); public static final String CONFIG_PATH = "spring/spring-dspace-applicationContext.xml"; public static final String CORE_RESOURCE_PATH = "classpath*:spring/spring-dspace-core-services.xml"; @@ -426,9 +429,10 @@ public T getServiceByName(String name, Class type) { service = (T) applicationContext.getBean(name, type); } catch (BeansException e) { // no luck, try the fall back option - log.warn( + log.debug( "Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", name, e); + + " Will try to look up bean by type next.", name); + CallStackUtils.logCaller(log, DEBUG); service = null; } } else { @@ -437,8 +441,9 @@ public T getServiceByName(String name, Class type) { service = (T) applicationContext.getBean(type.getName(), type); } catch (BeansException e) { // no luck, try the fall back option - log.warn("Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", type.getName(), e); + log.debug("Unable to locate bean by name or id={}." 
+ + " Will try to look up bean by type next.", type::getName); + CallStackUtils.logCaller(log, DEBUG); service = null; } } diff --git a/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java new file mode 100644 index 000000000000..cb60a223a184 --- /dev/null +++ b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java @@ -0,0 +1,44 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.utils; + +import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE; + +import java.lang.StackWalker.StackFrame; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; + +/** + * Utility methods for manipulating call stacks. + * + * @author mwood + */ +public class CallStackUtils { + private CallStackUtils() {} + + /** + * Log the class, method and line of the caller's caller. + * + * @param log logger to use. + * @param level log at this level, if enabled. + */ + static public void logCaller(Logger log, Level level) { + if (log.isEnabled(level)) { + StackWalker stack = StackWalker.getInstance(RETAIN_CLASS_REFERENCE); + StackFrame caller = stack.walk(stream -> stream.skip(2) + .findFirst() + .get()); + String callerClassName = caller.getDeclaringClass().getCanonicalName(); + String callerMethodName = caller.getMethodName(); + int callerLine = caller.getLineNumber(); + log.log(level, "Called from {}.{} line {}.", + callerClassName, callerMethodName, callerLine); + } + } +} From f587989726aeb6ca96c668f931cd2c8b09a33845 Mon Sep 17 00:00:00 2001 From: Mohamed Saber Eskander Date: Thu, 1 Jun 2023 11:48:45 +0300 Subject: [PATCH 256/686] edited param name --- .../bulkaccesscontrol/BulkAccessControlScriptConfiguration.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java index 590a7eaa7957..5196247f94cb 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java @@ -76,7 +76,7 @@ public Options getOptions() { if (options == null) { Options options = new Options(); - options.addOption("u", "uuids", true, "target uuids of communities/collections/items"); + options.addOption("u", "uuid", true, "target uuids of communities/collections/items"); options.getOption("u").setType(String.class); options.getOption("u").setRequired(true); From 56c8820d31439ce613585fadf1813ee62d45d0b1 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Thu, 1 Jun 2023 14:41:58 +0200 Subject: [PATCH 257/686] [DURACOM-126] Improve test in order to check item can be submitted --- .../java/org/dspace/builder/WorkspaceItemBuilder.java | 4 ++++ .../dspace/app/rest/WorkspaceItemRestRepositoryIT.java | 10 +++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java index 8b7bc2978ba1..9d786d4761f0 100644 --- a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java +++ 
b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java @@ -189,6 +189,10 @@ public WorkspaceItemBuilder withAbstract(final String subject) { return addMetadataValue(MetadataSchemaEnum.DC.getName(),"description", "abstract", subject); } + public WorkspaceItemBuilder withType(final String type) { + return addMetadataValue(MetadataSchemaEnum.DC.getName(),"type", null, type); + } + public WorkspaceItemBuilder grantLicense() { Item item = workspaceItem.getItem(); String license; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java index ad9b8046f25a..8b2f3f093a37 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java @@ -8582,11 +8582,13 @@ public void testSubmissionWithHiddenSections() throws Exception { WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) .withTitle("Workspace Item") .withIssueDate("2023-01-01") + .withType("book") .build(); context.restoreAuthSystemState(); + String adminToken = getAuthToken(admin.getEmail(), password); - getClient(getAuthToken(admin.getEmail(), password)) + getClient(adminToken) .perform(get("/api/submission/workspaceitems/" + workspaceItem.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$.sections.test-outside-workflow-hidden").doesNotExist()) @@ -8594,5 +8596,11 @@ public void testSubmissionWithHiddenSections() throws Exception { .andExpect(jsonPath("$.sections.test-never-hidden").exists()) .andExpect(jsonPath("$.sections.test-always-hidden").doesNotExist()); + // Deposit the item + getClient(adminToken).perform(post("/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + workspaceItem.getID()) + .contentType(textUriContentType)) + .andExpect(status().isCreated()); + } } From dae8f166e1673208402c0be5799180ebea76c212 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Thu, 1 Jun 2023 14:48:46 +0200 Subject: [PATCH 258/686] [DURACOM-126] add java doc --- .../main/java/org/dspace/app/util/SubmissionStepConfig.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java index 1a8f2744b818..28d39d911b95 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java @@ -176,6 +176,12 @@ public String getVisibilityOutside() { return visibilityOutside; } + /** + * Check if given submission section object is hidden for the current submission scope + * + * @param obj the InProgressSubmission to check + * @return true if the submission section is hidden, false otherwise + */ public boolean isHiddenForInProgressSubmission(InProgressSubmission obj) { String scopeToCheck = getScope(obj); From 0ec27875bcb3bab66a38ddc3987ed401c67a2f0d Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Wed, 31 May 2023 16:47:02 +0200 Subject: [PATCH 259/686] [DURACOM-149] use right formatter for mapping of dc.date.issued in pubmed live import; added integration tests --- .../PubmedDateMetadatumContributor.java | 18 +- .../PubmedImportMetadataSourceServiceIT.java | 213 ++++++++++++++++++ .../app/rest/pubmedimport-fetch-test.xml | 14 ++ 
.../app/rest/pubmedimport-fetch-test2.xml | 14 ++ .../app/rest/pubmedimport-search-test.xml | 194 ++++++++++++++++ .../app/rest/pubmedimport-search-test2.xml | 132 +++++++++++ 6 files changed, 578 insertions(+), 7 deletions(-) create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java index 6536026058ec..add9caef1b74 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java @@ -15,8 +15,8 @@ import java.util.LinkedList; import java.util.List; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.content.DCDate; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; @@ -107,28 +107,30 @@ public Collection contributeMetadata(T t) { LinkedList dayList = (LinkedList) day.contributeMetadata(t); for (int i = 0; i < yearList.size(); i++) { - DCDate dcDate = null; + String resultDateString = ""; String dateString = ""; + SimpleDateFormat resultFormatter = null; if (monthList.size() > i && dayList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + "-" + dayList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM-dd"); } else if (monthList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM"); } else { dateString = yearList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy"); } int j = 0; // Use the first dcDate that has been formatted (Config should go from most specific to most lenient) - while (j < dateFormatsToAttempt.size()) { + while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) { String dateFormat = dateFormatsToAttempt.get(j); try { SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); Date date = formatter.parse(dateString); - dcDate = new DCDate(date); - values.add(metadataFieldMapping.toDCValue(field, formatter.format(date))); - break; + resultDateString = resultFormatter.format(date); } catch (ParseException e) { // Multiple dateformats can be configured, we don't want to print the entire stacktrace every // time one of those formats fails. 
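The hunk above re-emits dc.date.issued at the same granularity that PubMed supplied (yyyy, yyyy-MM or yyyy-MM-dd) instead of always expanding the parsed value into a full DCDate. A minimal standalone sketch of that idea follows; the "yyyy-MMM" input pattern and the class name are illustrative assumptions, not values taken from this patch or from config/spring/api/pubmed-integration.xml.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

public class PubmedDateGranularitySketch {
    public static void main(String[] args) throws ParseException {
        // PubMed returned only a year and a month, e.g. Year=2011, Month=Oct.
        String dateString = "2011-Oct";
        // Hypothetical input pattern; the real patterns are configured in pubmed-integration.xml.
        Date parsed = new SimpleDateFormat("yyyy-MMM", Locale.ENGLISH).parse(dateString);
        // Re-emit at the same granularity as the input: year and month only.
        String issued = new SimpleDateFormat("yyyy-MM").format(parsed);
        System.out.println(issued); // prints 2011-10, the dc.date.issued value asserted in the new IT
    }
}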
@@ -138,7 +140,9 @@ public Collection contributeMetadata(T t) { } j++; } - if (dcDate == null) { + if (StringUtils.isNotBlank(resultDateString)) { + values.add(metadataFieldMapping.toDCValue(field, resultDateString)); + } else { log.info( "Failed parsing " + dateString + ", check " + "the configured dataformats in config/spring/api/pubmed-integration.xml"); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java new file mode 100644 index 000000000000..79b8ec3f7297 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -0,0 +1,213 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link PubmedImportMetadataSourceServiceImpl} + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private PubmedImportMetadataSourceServiceImpl pubmedImportMetadataServiceImpl; + + @Autowired + private LiveImportClientImpl liveImportClientImpl; + + @Test + public void pubmedImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public 
void pubmedImportMetadataGetRecords2Test() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test2.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test2.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords2(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, + "Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review."); + MetadatumDTO description1 = createMetadatumDTO("dc", "description", "abstract", "To report and synthesize" + + " the main strategies for teaching clinical reasoning described in the literature in the context of" + + " advanced clinical practice and promote new areas of research to improve the pedagogical approach" + + " to clinical reasoning in Advanced Practice Nursing."); + MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", "Clinical reasoning and" + + " clinical thinking are essential elements in the advanced nursing clinical practice decision-making" + + " process. The quality improvement of care is related to the development of those skills." + + " Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical" + + " reasoning in advanced clinical practice."); + MetadatumDTO description3 = createMetadatumDTO("dc", "description", "abstract", "A scoping review was" + + " conducted using the framework developed by Arksey and O'Malley as a research strategy." + + " Consistent with the nature of scoping reviews, a study protocol has been established."); + MetadatumDTO description4 = createMetadatumDTO("dc", "description", "abstract", "The studies included and" + + " analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary" + + " revision studies, published in biomedical databases, were selected, including qualitative ones." + + " Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID." + + " Three authors independently evaluated the articles for titles, abstracts, and full text."); + MetadatumDTO description5 = createMetadatumDTO("dc", "description", "abstract", "1433 articles were examined," + + " applying the eligibility and exclusion criteria 73 studies were assessed for eligibility," + + " and 27 were included in the scoping review. 
The results that emerged from the review were" + + " interpreted and grouped into three macro strategies (simulations-based education, art and visual" + + " thinking, and other learning approaches) and nineteen educational interventions."); + MetadatumDTO description6 = createMetadatumDTO("dc", "description", "abstract", "Among the different" + + " strategies, the simulations are the most used. Despite this, our scoping review reveals that is" + + " necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic" + + " reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to" + + " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to" + + " acquire an adequate level of judgment and critical thinking. Therefore, it will be" + + " necessary to relate teaching methodologies with the skills developed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "36708638"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana"); + MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola"); + MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Prandi, Cesarina"); + MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Baldon, Alessia"); + MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Bianchi, Monica"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2023-02"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + MetadatumDTO subject1 = createMetadatumDTO("dc", "subject", null, "Advanced practice nursing"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Clinical reasoning"); + MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "Critical thinking"); + MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies"); + MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education"); + MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology"); + + metadatums.add(title); + metadatums.add(description1); + metadatums.add(description2); + metadatums.add(description3); + metadatums.add(description4); + metadatums.add(description5); + metadatums.add(description6); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(author3); + metadatums.add(author4); + metadatums.add(author5); + metadatums.add(author6); + metadatums.add(date); + metadatums.add(language); + metadatums.add(subject1); + metadatums.add(subject2); + metadatums.add(subject3); + metadatums.add(subject4); + metadatums.add(subject5); + metadatums.add(subject6); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + + private ArrayList getRecords2() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, "Searching NCBI Databases Using Entrez."); + MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", "One of the most widely" + + " used interfaces for the retrieval of information from biological databases is the NCBI Entrez" + + " system. 
Entrez capitalizes on the fact that there are pre-existing, logical relationships between" + + " the individual entries found in numerous public databases. The existence of such natural" + + " connections, mostly biological in nature, argued for the development of a method through which" + + " all the information about a particular biological entity could be found without having to" + + " sequentially visit and query disparate databases. Two basic protocols describe simple, text-based" + + " searches, illustrating the types of information that can be retrieved through the Entrez system." + + " An alternate protocol builds upon the first basic protocol, using additional," + + " built-in features of the Entrez system, and providing alternative ways to issue the initial query." + + " The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure" + + " visualization tool, is also discussed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "21975942"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + + metadatums.add(title); + metadatums.add(description); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(date); + metadatums.add(language); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml new file mode 100644 index 000000000000..4f921658e32b --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml @@ -0,0 +1,14 @@ + + + + 1 + 1 + 0 + 1 + MCID_64784b5ab65e3b2b2253cd3a + + 36708638 + + + "10 1016 j nepr 2023 103548"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml new file mode 100644 index 000000000000..1ff9570777a7 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml @@ -0,0 +1,14 @@ + + + + 1 + 1 + 0 + 1 + MCID_64784b12ccf058150336d6a8 + + 21975942 + + + "10 1002 0471142905 hg0610s71"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml new file mode 100644 index 000000000000..666fb1e7d550 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml @@ -0,0 +1,194 @@ + + + + + + 36708638 + + 2023 + 02 + 23 + + + 2023 + 02 + 23 + +
+ + 1873-5223 + + 67 + + 2023 + Feb + + + Nurse education in practice + Nurse Educ Pract + + Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review. + + 103548 + 103548 + + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + To report and synthesize the main strategies for teaching clinical reasoning described in the literature in the context of advanced clinical practice and promote new areas of research to improve the pedagogical approach to clinical reasoning in Advanced Practice Nursing. + Clinical reasoning and clinical thinking are essential elements in the advanced nursing clinical practice decision-making process. The quality improvement of care is related to the development of those skills. Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical reasoning in advanced clinical practice. + A scoping review was conducted using the framework developed by Arksey and O'Malley as a research strategy. Consistent with the nature of scoping reviews, a study protocol has been established. + The studies included and analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary revision studies, published in biomedical databases, were selected, including qualitative ones. Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID. Three authors independently evaluated the articles for titles, abstracts, and full text. + 1433 articles were examined, applying the eligibility and exclusion criteria 73 studies were assessed for eligibility, and 27 were included in the scoping review. The results that emerged from the review were interpreted and grouped into three macro strategies (simulations-based education, art and visual thinking, and other learning approaches) and nineteen educational interventions. + Among the different strategies, the simulations are the most used. Despite this, our scoping review reveals that is necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to demonstrate which methodology is more effective in obtaining the learning outcomes necessary to acquire an adequate level of judgment and critical thinking. Therefore, it will be necessary to relate teaching methodologies with the skills developed. + Copyright © 2023 Elsevier Ltd. All rights reserved. + + + + Giuffrida + Silvia + S + + Department of Cardiology and Cardiac Surgery, Cardio Centro Ticino Institute, Ente Ospedaliero Cantonale, Lugano, Switzerland. Electronic address: silvia.giuffrida@eoc.ch. + + + + Silano + Verdiana + V + + Nursing Direction of Settore Anziani Città di Bellinzona, Bellinzona, Switzerland. Electronic address: verdiana.silano@hotmail.it. + + + + Ramacciati + Nicola + N + + Department of Pharmacy, Health and Nutritional Sciences (DFSSN), University of Calabria, Rende, Italy. Electronic address: nicola.ramacciati@unical.it. + + + + Prandi + Cesarina + C + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: cesarina.prandi@supsi.ch. + + + + Baldon + Alessia + A + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: alessia.baldon@supsi.ch. 
+ + + + Bianchi + Monica + M + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: monica.bianchi@supsi.ch. + + + + eng + + Journal Article + Review + + + 2023 + 01 + 17 + +
+ + Scotland + Nurse Educ Pract + 101090848 + 1471-5953 + + IM + + + Humans + + + Advanced Practice Nursing + + + Learning + + + Curriculum + + + Thinking + + + Clinical Reasoning + + + Students, Nursing + + + + Advanced practice nursing + Clinical reasoning + Critical thinking + Educational strategies + Nursing education + Teaching methodology + + Declaration of Competing Interest The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper. +
+ + + + 2022 + 11 + 9 + + + 2022 + 12 + 17 + + + 2023 + 1 + 10 + + + 2023 + 1 + 29 + 6 + 0 + + + 2023 + 2 + 25 + 6 + 0 + + + 2023 + 1 + 28 + 18 + 7 + + + ppublish + + 36708638 + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + +
+
\ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml new file mode 100644 index 000000000000..949d3b1250b2 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml @@ -0,0 +1,132 @@ + + + + + + 21975942 + + 2012 + 01 + 13 + + + 2016 + 10 + 21 + +
+ + 1934-8258 + + Chapter 6 + + 2011 + Oct + + + Current protocols in human genetics + Curr Protoc Hum Genet + + Searching NCBI Databases Using Entrez. + + Unit6.10 + Unit6.10 + + 10.1002/0471142905.hg0610s71 + + One of the most widely used interfaces for the retrieval of information from biological databases is the NCBI Entrez system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between the individual entries found in numerous public databases. The existence of such natural connections, mostly biological in nature, argued for the development of a method through which all the information about a particular biological entity could be found without having to sequentially visit and query disparate databases. Two basic protocols describe simple, text-based searches, illustrating the types of information that can be retrieved through the Entrez system. An alternate protocol builds upon the first basic protocol, using additional, built-in features of the Entrez system, and providing alternative ways to issue the initial query. The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure visualization tool, is also discussed. + © 2011 by John Wiley & Sons, Inc. + + + + Gibney + Gretchen + G + + + Baxevanis + Andreas D + AD + + + eng + + Journal Article + +
+ + United States + Curr Protoc Hum Genet + 101287858 + 1934-8258 + + IM + + + Animals + + + Database Management Systems + + + Databases, Factual + + + Humans + + + Information Storage and Retrieval + methods + + + Internet + + + Molecular Conformation + + + National Library of Medicine (U.S.) + + + PubMed + + + United States + + + User-Computer Interface + + +
+ + + + 2011 + 10 + 7 + 6 + 0 + + + 2011 + 10 + 7 + 6 + 0 + + + 2012 + 1 + 14 + 6 + 0 + + + ppublish + + 21975942 + 10.1002/0471142905.hg0610s71 + + +
+
\ No newline at end of file From eb224eb8096fa9d7173cace2a2b8625d40b807eb Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Sun, 14 May 2023 21:52:31 +0200 Subject: [PATCH 260/686] 100553: Added stricter metadata field & schema validation --- .../MetadataFieldRestRepository.java | 12 ++-- .../MetadataSchemaRestRepository.java | 6 +- .../rest/MetadataSchemaRestRepositoryIT.java | 16 ++++- .../rest/MetadatafieldRestRepositoryIT.java | 60 +++++++++++++++++-- 4 files changed, 81 insertions(+), 13 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index eefd6331d116..5152f11902f5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -260,14 +260,18 @@ protected MetadataFieldRest createAndReturn(Context context) if (isBlank(metadataFieldRest.getElement())) { throw new UnprocessableEntityException("metadata element (in request body) cannot be blank"); - } else if (metadataFieldRest.getElement().contains(".")) { - throw new UnprocessableEntityException("metadata element (in request body) cannot contain dots"); + } else if (!metadataFieldRest.getElement().matches("^[^. ,]{1,64}$")) { + throw new UnprocessableEntityException( + "metadata element (in request body) cannot contain dots, commas or spaces and should be smaller than" + + " 64 characters"); } if (isBlank(metadataFieldRest.getQualifier())) { metadataFieldRest.setQualifier(null); - } else if (metadataFieldRest.getQualifier().contains(".")) { - throw new UnprocessableEntityException("metadata qualifier (in request body) cannot contain dots"); + } else if (!metadataFieldRest.getQualifier().matches("^[^. ,]{1,64}$")) { + throw new UnprocessableEntityException( + "metadata qualifier (in request body) cannot contain dots, commas or spaces and should be smaller" + + " than 64 characters"); } // create diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java index ba49a839bbe5..d9c148b71c0d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataSchemaRestRepository.java @@ -93,8 +93,10 @@ protected MetadataSchemaRest createAndReturn(Context context) // validate fields if (isBlank(metadataSchemaRest.getPrefix())) { throw new UnprocessableEntityException("metadata schema name cannot be blank"); - } else if (metadataSchemaRest.getPrefix().contains(".")) { - throw new UnprocessableEntityException("metadata schema namespace cannot contain dots"); + } else if (!metadataSchemaRest.getPrefix().matches("^[^. 
,]{1,32}$")) { + throw new UnprocessableEntityException( + "metadata schema namespace cannot contain dots, commas or spaces and should be smaller than" + + " 32 characters"); } if (isBlank(metadataSchemaRest.getNamespace())) { throw new UnprocessableEntityException("metadata schema namespace cannot be blank"); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java index 4017d5f2da63..72508a0dad58 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java @@ -117,7 +117,7 @@ public void createSuccess() throws Exception { } @Test - public void createUnprocessableEntity_prefixContainingDots() throws Exception { + public void createUnprocessableEntity_prefixContainingInvalidCharacters() throws Exception { context.turnOffAuthorisationSystem(); MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") @@ -135,6 +135,20 @@ public void createUnprocessableEntity_prefixContainingDots() throws Exception { .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) .contentType(contentType)) .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test,SchemaName"); + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test SchemaName"); + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java index da8dacb46935..6c3872c7eca6 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java @@ -50,12 +50,12 @@ */ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegrationTest { - private static final String ELEMENT = "test element"; - private static final String QUALIFIER = "test qualifier"; + private static final String ELEMENT = "test_element"; + private static final String QUALIFIER = "test_qualifier"; private static final String SCOPE_NOTE = "test scope_note"; - private static final String ELEMENT_UPDATED = "test element updated"; - private static final String QUALIFIER_UPDATED = "test qualifier updated"; + private static final String ELEMENT_UPDATED = "test_element_updated"; + private static final String QUALIFIER_UPDATED = "test_qualifier_updated"; private static final String SCOPE_NOTE_UPDATED = "test scope_note updated"; private MetadataSchema metadataSchema; @@ -758,7 +758,7 @@ public void createUnauthorized() throws Exception { } @Test - public void createUnprocessableEntity_elementContainingDots() throws Exception { + public void createUnprocessableEntity_elementContainingInvalidCharacters() throws Exception { MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); metadataFieldRest.setElement("testElement.ForCreate"); 
metadataFieldRest.setQualifier(QUALIFIER); @@ -775,10 +775,34 @@ public void createUnprocessableEntity_elementContainingDots() throws Exception { .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) .contentType(contentType)) .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); } @Test - public void createUnprocessableEntity_qualifierContainingDots() throws Exception { + public void createUnprocessableEntity_qualifierContainingInvalidCharacters() throws Exception { MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); metadataFieldRest.setElement(ELEMENT); metadataFieldRest.setQualifier("testQualifier.ForCreate"); @@ -795,6 +819,30 @@ public void createUnprocessableEntity_qualifierContainingDots() throws Exception .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) .contentType(contentType)) .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); } @Test From d76bdc6d476fa07ce518fa920dd0b1559228171f Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Thu, 1 Jun 2023 18:47:52 +0200 Subject: [PATCH 261/686] CST-9634 Assure that REST API assumes all dates to be UTC if not specified --- .../ResourcePolicyEndDateAddOperation.java | 11 +++---- ...ResourcePolicyEndDateReplaceOperation.java | 11 +++---- .../ResourcePolicyStartDateAddOperation.java | 11 +++---- ...sourcePolicyStartDateReplaceOperation.java | 11 +++---- .../resourcePolicy/ResourcePolicyUtils.java | 31 ++++++++----------- 
.../AccessConditionReplacePatchOperation.java | 21 ++++--------- 6 files changed, 39 insertions(+), 57 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateAddOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateAddOperation.java index b06637bad240..0d426c96d06a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateAddOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateAddOperation.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; @@ -15,6 +14,7 @@ import org.dspace.app.rest.repository.patch.operation.PatchOperation; import org.dspace.authorize.ResourcePolicy; import org.dspace.core.Context; +import org.dspace.util.MultiFormatDateParser; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -58,11 +58,10 @@ public R perform(Context context, R resource, Operation operation) { */ private void add(ResourcePolicy resourcePolicy, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = resourcePolicyUtils.simpleDateFormat.parse(dateS); - resourcePolicy.setEndDate(date); - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid endDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + resourcePolicy.setEndDate(date); + if (date == null) { + throw new DSpaceBadRequestException("Invalid endDate value " + dateS); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateReplaceOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateReplaceOperation.java index a71224ea294d..fc4e7a63ca87 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateReplaceOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyEndDateReplaceOperation.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; @@ -15,6 +14,7 @@ import org.dspace.app.rest.repository.patch.operation.PatchOperation; import org.dspace.authorize.ResourcePolicy; import org.dspace.core.Context; +import org.dspace.util.MultiFormatDateParser; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -58,12 +58,11 @@ public R perform(Context context, R resource, Operation operation) { */ private void replace(ResourcePolicy resourcePolicy, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = resourcePolicyUtils.simpleDateFormat.parse(dateS); - resourcePolicy.setEndDate(date); - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid endDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw new 
DSpaceBadRequestException("Invalid endDate value " + dateS); } + resourcePolicy.setEndDate(date); } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateAddOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateAddOperation.java index f8f74b65868d..f19d7043cf7d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateAddOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateAddOperation.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; @@ -15,6 +14,7 @@ import org.dspace.app.rest.repository.patch.operation.PatchOperation; import org.dspace.authorize.ResourcePolicy; import org.dspace.core.Context; +import org.dspace.util.MultiFormatDateParser; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -59,12 +59,11 @@ public R perform(Context context, R resource, Operation operation) { */ private void add(ResourcePolicy resourcePolicy, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = resourcePolicyUtils.simpleDateFormat.parse(dateS); - resourcePolicy.setStartDate(date); - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid startDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw new DSpaceBadRequestException("Invalid startDate value " + dateS); } + resourcePolicy.setStartDate(date); } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateReplaceOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateReplaceOperation.java index a6812f658132..2d1425341071 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateReplaceOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyStartDateReplaceOperation.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; @@ -15,6 +14,7 @@ import org.dspace.app.rest.repository.patch.operation.PatchOperation; import org.dspace.authorize.ResourcePolicy; import org.dspace.core.Context; +import org.dspace.util.MultiFormatDateParser; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -58,12 +58,11 @@ public R perform(Context context, R resource, Operation operation) { */ private void replace(ResourcePolicy resourcePolicy, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = resourcePolicyUtils.simpleDateFormat.parse(dateS); - resourcePolicy.setStartDate(date); - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid startDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw 
new DSpaceBadRequestException("Invalid startDate value " + dateS); } + resourcePolicy.setStartDate(date); } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java index 435480e318ef..7718260be7c4 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java @@ -7,13 +7,12 @@ */ package org.dspace.app.rest.repository.patch.operation.resourcePolicy; -import java.text.ParseException; -import java.text.SimpleDateFormat; import java.util.Date; import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.model.patch.Operation; import org.dspace.authorize.ResourcePolicy; +import org.dspace.util.MultiFormatDateParser; import org.springframework.stereotype.Component; /** @@ -25,8 +24,6 @@ @Component public class ResourcePolicyUtils { - public static final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); - /** * Paths in json body of patched that use these resourcePolicy operations */ @@ -112,13 +109,12 @@ public void checkResourcePolicyForExistingDescriptionValue(ResourcePolicy resour */ public void checkResourcePolicyForConsistentStartDateValue(ResourcePolicy resource, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = simpleDateFormat.parse(dateS); - if (resource.getEndDate() != null && resource.getEndDate().before(date)) { - throw new DSpaceBadRequestException("Attempting to set an invalid startDate greater than the endDate."); - } - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid startDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw new DSpaceBadRequestException("Invalid startDate value " + dateS); + } + if (resource.getEndDate() != null && resource.getEndDate().before(date)) { + throw new DSpaceBadRequestException("Attempting to set an invalid startDate greater than the endDate."); } } @@ -134,13 +130,12 @@ public void checkResourcePolicyForConsistentStartDateValue(ResourcePolicy resour */ public void checkResourcePolicyForConsistentEndDateValue(ResourcePolicy resource, Operation operation) { String dateS = (String) operation.getValue(); - try { - Date date = simpleDateFormat.parse(dateS); - if (resource.getStartDate() != null && resource.getStartDate().after(date)) { - throw new DSpaceBadRequestException("Attempting to set an invalid endDate smaller than the startDate."); - } - } catch (ParseException e) { - throw new DSpaceBadRequestException("Invalid endDate value", e); + Date date = MultiFormatDateParser.parse(dateS); + if (date == null) { + throw new DSpaceBadRequestException("Invalid endDate value " + dateS); + } + if (resource.getStartDate() != null && resource.getStartDate().after(date)) { + throw new DSpaceBadRequestException("Attempting to set an invalid endDate smaller than the startDate."); } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java index 0216628a6b87..1e45293093dd 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java @@ -8,8 +8,6 @@ package org.dspace.app.rest.submit.factory.impl; import java.sql.SQLException; import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.Objects; @@ -29,6 +27,7 @@ import org.dspace.submit.model.AccessConditionConfiguration; import org.dspace.submit.model.AccessConditionConfigurationService; import org.dspace.submit.model.AccessConditionOption; +import org.dspace.util.MultiFormatDateParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -145,20 +144,12 @@ private void updatePolicy(Context context, String valueToReplare, String attribu } } - private Date parseDate(String date) { - List knownPatterns = Arrays.asList( - new SimpleDateFormat("yyyy-MM-dd"), - new SimpleDateFormat("dd-MM-yyyy"), - new SimpleDateFormat("yyyy/MM/dd"), - new SimpleDateFormat("dd/MM/yyyy")); - for (SimpleDateFormat pattern : knownPatterns) { - try { - return pattern.parse(date); - } catch (ParseException e) { - log.error(e.getMessage(), e); - } + private Date parseDate(String dateString) { + Date date = MultiFormatDateParser.parse(dateString); + if (date == null) { + throw new UnprocessableEntityException("Provided format of date:" + dateString + " is not supported!"); } - throw new UnprocessableEntityException("Provided format of date:" + date + " is not supported!"); + return date; } private String getValue(Object value) { From b78ddf249847ebfe3013a164c938316b8782b6b3 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Thu, 1 Jun 2023 19:38:01 +0200 Subject: [PATCH 262/686] CST-9634 make public the validateResourcePolicy method --- .../java/org/dspace/submit/model/AccessConditionOption.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java index b29af4e7733c..e5cd86f50458 100644 --- a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java +++ b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java @@ -195,7 +195,7 @@ public void updateResourcePolicy(Context context, ResourcePolicy resourcePolicy) * configured maximum. * @throws ParseException passed through. 
*/ - private void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) + public void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) throws IllegalStateException, ParseException { LOG.debug("Validate policy dates: name '{}', startDate {}, endDate {}", name, startDate, endDate); From 1b7af28dd3420000049c75c80666e9e777aa6bdf Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Thu, 1 Jun 2023 21:07:00 +0200 Subject: [PATCH 263/686] CST-9634 remove unnecessary turn off/on auth system --- .../org/dspace/app/bulkaccesscontrol/BulkAccessControl.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index 86061559798d..bef5a15e03f1 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -163,8 +163,6 @@ public void internalRun() throws Exception { throw new AuthorizeException("Current user is not eligible to execute script bulk-access-control"); } - context.turnOffAuthorisationSystem(); - if (uuids == null || uuids.size() == 0) { handler.logError("A target uuid must be provided with at least on uuid (run with -h flag for details)"); throw new IllegalArgumentException("At least one target uuid must be provided"); @@ -184,7 +182,6 @@ public void internalRun() throws Exception { validate(accessControl); updateItemsAndBitstreamsPolices(accessControl); context.complete(); - context.restoreAuthSystemState(); } catch (Exception e) { handler.handleException(e); context.abort(); From 70a7930f63d2a98a024aa1d002f799d110e5e8ab Mon Sep 17 00:00:00 2001 From: Bui Thai Hai Date: Fri, 2 Jun 2023 09:45:52 +0700 Subject: [PATCH 264/686] Tweaks fo test cases. 
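The DiscoveryIT tweak below keeps a single check that the returned lastModified values form a non-increasing sequence (the dc_date_accessioned variant stays only as a commented-out example). A self-contained sketch of that comparison style follows; the class name and sample values are illustrative only, not part of this patch.

import java.util.LinkedList;
import java.util.List;

public class NonIncreasingOrderSketch {
    // True when every element compares >= its successor, i.e. the list is sorted in
    // descending order, which is what the tweaked DiscoveryIT assertion loop expects
    // of the lastModified values returned by the default sort.
    static boolean isNonIncreasing(List<String> values) {
        for (int i = 0; i < values.size() - 1; i++) {
            if (values.get(i).compareTo(values.get(i + 1)) < 0) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        List<String> lastModifieds = new LinkedList<>(List.of("2023-06-02", "2023-06-01", "2023-05-30"));
        System.out.println(isNonIncreasing(lastModifieds)); // true
    }
}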
--- .../org/dspace/discovery/DiscoveryIT.java | 36 +++++++++---------- .../utils/RestDiscoverQueryBuilderTest.java | 7 ++-- 2 files changed, 18 insertions(+), 25 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 0c3a52ec79f5..164525afb11f 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -768,6 +768,7 @@ public void searchWithDefaultSortServiceTest() throws SearchServiceException { .withTitle("item " + i) .build(); } + context.restoreAuthSystemState(); // Build query with default parameters (except for workspaceConf) DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder() @@ -776,26 +777,21 @@ public void searchWithDefaultSortServiceTest() throws SearchServiceException { DiscoverResult result = searchService.search(context, discoverQuery); - if (defaultSortField.getMetadataField().equals("dc_date_accessioned")) { - // Verify that search results are sort by dc_date_accessioned - LinkedList dc_date_accesioneds = result.getIndexableObjects().stream() - .map(o -> ((Item) o.getIndexedObject()).getMetadata()) - .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) - .map(m -> m.getValue()).findFirst().orElse("") - ) - .collect(Collectors.toCollection(LinkedList::new)); - assertFalse(dc_date_accesioneds.isEmpty()); - for (int i = 1; i < dc_date_accesioneds.size() - 1; i++) { - assertTrue(dc_date_accesioneds.get(i).compareTo(dc_date_accesioneds.get(i + 1)) >= 0); - } - } else if (defaultSortField.getMetadataField().equals("lastModified")) { - LinkedList lastModifieds = result.getIndexableObjects().stream() - .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) - .collect(Collectors.toCollection(LinkedList::new)); - assertFalse(lastModifieds.isEmpty()); - for (int i = 1; i < lastModifieds.size() - 1; i++) { - assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); - } + /* + // code example for testing against sort by dc_date_accessioned + LinkedList dc_date_accesioneds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getMetadata()) + .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) + .map(m -> m.getValue()).findFirst().orElse("") + ) + .collect(Collectors.toCollection(LinkedList::new)); + }*/ + LinkedList lastModifieds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(lastModifieds.isEmpty()); + for (int i = 1; i < lastModifieds.size() - 1; i++) { + assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java index 511bb8f98b7b..e21f395f0907 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java @@ -171,15 +171,12 @@ public void testSortByScore() throws Exception { @Test public void testSortByDefaultSortField() throws Exception { - page = PageRequest.of(2, 10, Sort.Direction.DESC, "dc.date.accessioned"); + page = PageRequest.of(2, 10); 
restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page); verify(discoverQueryBuilder, times(1)) .buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(), - page.getPageSize(), page.getOffset(), - discoveryConfiguration.getSearchSortConfiguration().getDefaultSortField().getMetadataField(), - discoveryConfiguration.getSearchSortConfiguration().getDefaultSortField() - .getDefaultSortOrder().name().toUpperCase()); + page.getPageSize(), page.getOffset(), null, null); } @Test(expected = DSpaceBadRequestException.class) From d565cd3e6b343c8928b5a9096d19a48ed1c89ee1 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Fri, 2 Jun 2023 08:21:32 +0200 Subject: [PATCH 265/686] 102052: Return 204 status when primaryBitstream is null --- .../dspace/app/rest/PrimaryBitstreamController.java | 10 +++++++--- .../dspace/app/rest/PrimaryBitstreamControllerIT.java | 2 +- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java index cdcbbce7476e..9afb367b1689 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java @@ -18,7 +18,6 @@ import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.BitstreamRest; import org.dspace.app.rest.model.BundleRest; -import org.dspace.app.rest.model.hateoas.BitstreamResource; import org.dspace.app.rest.model.hateoas.BundleResource; import org.dspace.app.rest.repository.BundlePrimaryBitstreamLinkRepository; import org.dspace.app.rest.utils.ContextUtil; @@ -67,10 +66,15 @@ public class PrimaryBitstreamController { */ @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'READ')") @RequestMapping(method = RequestMethod.GET) - public BitstreamResource getPrimaryBitstream(@PathVariable UUID uuid, + public ResponseEntity> getPrimaryBitstream(@PathVariable UUID uuid, HttpServletRequest request) { BitstreamRest bitstreamRest = repository.getPrimaryBitstream(null, uuid, null, utils.obtainProjection()); - return converter.toResource(bitstreamRest); + if (bitstreamRest == null) { + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } else { + return ControllerUtils.toResponseEntity(HttpStatus.OK, new HttpHeaders(), + (RepresentationModel) converter.toResource(bitstreamRest)); + } } /** diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java index 630ef257deaa..f42166f25ab8 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java @@ -96,7 +96,7 @@ public void testGetPrimaryBitstreamBundleNotFound() throws Exception { public void testGetPrimaryBitstreamNonExisting() throws Exception { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(get(getBundleUrl(bundle.getID()))) - .andExpect(status().isOk()) + .andExpect(status().isNoContent()) .andExpect(jsonPath("$").doesNotExist()); } From 7c85b007c027c959217b3b9bc5d730006223d448 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Fri, 2 Jun 2023 10:10:45 +0200 Subject: [PATCH 266/686] [DURACOM-149] remove 
trailing whitespace --- .../dspace/app/rest/PubmedImportMetadataSourceServiceIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java index 79b8ec3f7297..3b39d251216c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -30,7 +30,7 @@ /** * Integration tests for {@link PubmedImportMetadataSourceServiceImpl} - * + * * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) */ public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { From d6d6a3e0740839271c5906f9c2ea46f833d01326 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Mon, 5 Jun 2023 09:45:40 +0200 Subject: [PATCH 267/686] 102052: Replace 'DELETE' with 'WRITE' on deletePrimaryBitstream + More auth ITs --- .../app/rest/PrimaryBitstreamController.java | 2 +- .../BundlePrimaryBitstreamLinkRepository.java | 2 +- .../rest/PrimaryBitstreamControllerIT.java | 286 +++++++++++++++++- 3 files changed, 279 insertions(+), 11 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java index 9afb367b1689..59ab1bba763a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java @@ -134,7 +134,7 @@ public BundleResource updatePrimaryBitstream(@PathVariable UUID uuid, * @param request The HttpServletRequest * @return The Bundle of which the primaryBitstream was deleted */ - @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'DELETE')") + @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'WRITE')") @RequestMapping(method = RequestMethod.DELETE) public ResponseEntity> deletePrimaryBitstream(@PathVariable UUID uuid, HttpServletRequest request) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java index c2f6dac5f228..a013aa0dd002 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java @@ -112,7 +112,7 @@ public BundleRest updatePrimaryBitstream(Context context, UUID bundleId, * @param context The current DSpace context * @param bundleId The UUID of the Bundle */ - @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'DELETE')") + @PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'WRITE')") public void deletePrimaryBitstream(Context context, UUID bundleId) { try { Bundle bundle = setPrimaryBitstream(context, bundleId, null, false); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java index f42166f25ab8..d8bc6c8cf98a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java @@ -52,6 +52,8 @@ public class PrimaryBitstreamControllerIT extends AbstractControllerIntegrationT Item item; Bitstream bitstream; Bundle bundle; + Community community; + Collection collection; @Before @Override @@ -59,8 +61,8 @@ public void setUp() throws Exception { super.setUp(); context.turnOffAuthorisationSystem(); - Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); + community = CommunityBuilder.createCommunity(context).build(); + collection = CollectionBuilder.createCollection(context, community).build(); item = ItemBuilder.createItem(context, collection).build(); // create bitstream in ORIGINAL bundle of item @@ -142,7 +144,7 @@ public void testPostPrimaryBitstreamInvalidBitstream() throws Exception { public void testPostPrimaryBitstreamAlreadyExists() throws Exception { context.turnOffAuthorisationSystem(); bundle.setPrimaryBitstreamID(bitstream); - Bitstream bitstream2 = createSecondBitstream(bundle); + Bitstream bitstream2 = createBitstream(bundle); context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); @@ -159,7 +161,7 @@ public void testPostPrimaryBitstreamAlreadyExists() throws Exception { public void testPostPrimaryBitstreamNotInBundle() throws Exception { context.turnOffAuthorisationSystem(); Bundle bundle2 = BundleBuilder.createBundle(context, item).withName("Bundle2").build(); - Bitstream bitstream2 = createSecondBitstream(bundle2); + Bitstream bitstream2 = createBitstream(bundle2); context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); @@ -172,11 +174,95 @@ public void testPostPrimaryBitstreamNotInBundle() throws Exception { Assert.assertNull(bundle.getPrimaryBitstream()); } + @Test + public void testPostPrimaryBitstreamCommunityAdmin() throws Exception { + // create new structure with Admin permissions on Community + context.turnOffAuthorisationSystem(); + Community com2 = CommunityBuilder.createCommunity(context).withAdminGroup(eperson).build(); + Collection col2 = CollectionBuilder.createCollection(context, com2).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundleUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually added + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamCollectionAdmin() throws Exception { + // create new structure with Admin permissions on Collection + context.turnOffAuthorisationSystem(); + Collection col2 = CollectionBuilder.createCollection(context, community).withAdminGroup(eperson).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + 
context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundleUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually added + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamItemAdmin() throws Exception { + // create new structure with Admin permissions on Item + context.turnOffAuthorisationSystem(); + Item item2 = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundleUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually added + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamForbidden() throws Exception { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testPostPrimaryBitstreamUnauthenticated() throws Exception { + getClient().perform(post(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isUnauthorized()); + } + @Test public void testUpdatePrimaryBitstream() throws Exception { context.turnOffAuthorisationSystem(); bundle.setPrimaryBitstreamID(bitstream); - Bitstream bitstream2 = createSecondBitstream(bundle); + Bitstream bitstream2 = createBitstream(bundle); context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); @@ -231,7 +317,7 @@ public void testUpdatePrimaryBitstreamNotInBundle() throws Exception { context.turnOffAuthorisationSystem(); bundle.setPrimaryBitstreamID(bitstream); Bundle bundle2 = BundleBuilder.createBundle(context, item).withName("Bundle2").build(); - Bitstream bitstream2 = createSecondBitstream(bundle2); + Bitstream bitstream2 = createBitstream(bundle2); context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); @@ -244,6 +330,105 @@ public void testUpdatePrimaryBitstreamNotInBundle() throws Exception { Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); } + @Test + public void testUpdatePrimaryBitstreamCommunityAdmin() throws Exception { + // create new structure with Admin permissions on Community + context.turnOffAuthorisationSystem(); + Community com2 = CommunityBuilder.createCommunity(context).withAdminGroup(eperson).build(); + Collection col2 = CollectionBuilder.createCollection(context, com2).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle 
bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + Bitstream bitstream3 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(put(getBundleUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream3.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually updated + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamCollectionAdmin() throws Exception { + // create new structure with Admin permissions on Collection + context.turnOffAuthorisationSystem(); + Collection col2 = CollectionBuilder.createCollection(context, community).withAdminGroup(eperson).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + Bitstream bitstream3 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(put(getBundleUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream3.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually updated + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamItemAdmin() throws Exception { + // create new structure with Admin permissions on Item + context.turnOffAuthorisationSystem(); + Item item2 = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + Bitstream bitstream3 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(put(getBundleUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream3.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually updated + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamForbidden() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(put(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + 
.andExpect(status().isForbidden()); + } + + @Test + public void testUpdatePrimaryBitstreamUnauthenticated() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + getClient().perform(put(getBundleUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isUnauthorized()); + } + @Test public void testDeletePrimaryBitstream() throws Exception { bundle.setPrimaryBitstreamID(bitstream); @@ -276,6 +461,89 @@ public void testDeletePrimaryBitstreamBundleNonExisting() throws Exception { Assert.assertNull(bundle.getPrimaryBitstream()); } + @Test + public void testDeletePrimaryBitstreamCommunityAdmin() throws Exception { + // create new structure with Admin permissions on Community + context.turnOffAuthorisationSystem(); + Community com2 = CommunityBuilder.createCommunity(context).withAdminGroup(eperson).build(); + Collection col2 = CollectionBuilder.createCollection(context, com2).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundleUrl(bundle2.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle2 = context.reloadEntity(bundle2); + Assert.assertNull(bundle2.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle2.getBitstreams().size()); + Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + } + + @Test + public void testDeletePrimaryBitstreamCollectionAdmin() throws Exception { + // create new structure with Admin permissions on Collection + context.turnOffAuthorisationSystem(); + Collection col2 = CollectionBuilder.createCollection(context, community).withAdminGroup(eperson).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundleUrl(bundle2.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle2 = context.reloadEntity(bundle2); + Assert.assertNull(bundle2.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle2.getBitstreams().size()); + Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + } + + @Test + public void testDeletePrimaryBitstreamItemAdmin() throws Exception { + // create new structure with Admin permissions on Item + context.turnOffAuthorisationSystem(); + Item item2 = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + 
getClient(token).perform(delete(getBundleUrl(bundle2.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle2 = context.reloadEntity(bundle2); + Assert.assertNull(bundle2.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle2.getBitstreams().size()); + Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + } + + @Test + public void testDeletePrimaryBitstreamForbidden() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundleUrl(bundle.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testDeletePrimaryBitstreamUnauthenticated() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + getClient().perform(delete(getBundleUrl(bundle.getID()))) + .andExpect(status().isUnauthorized()); + } + private String getBundleUrl(UUID uuid) { return "/api/core/bundles/" + uuid + "/primaryBitstream"; } @@ -284,11 +552,11 @@ private String getBitstreamUrl(UUID uuid) { return "/api/core/bitstreams/" + uuid; } - private Bitstream createSecondBitstream(Bundle bundle) throws Exception { - String bitstreamContent = "Second Bitstream"; + private Bitstream createBitstream(Bundle bundle) throws Exception { + String bitstreamContent = "Bitstream Content"; try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { return BitstreamBuilder.createBitstream(context, bundle, is) - .withName("Bitstream2") + .withName("Bitstream") .withMimeType("text/plain") .build(); } From a38ff421694a5be590a41928985e3f8cd54c1b37 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 25 May 2023 14:52:11 +0300 Subject: [PATCH 268/686] dspace: capture publisher from CrossRef live import Publisher is a required field on CrossRef so we can always rely on capturing this information when doing a live import. 
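As a rough, illustrative sketch (the JSON below is a trimmed, hypothetical works response, and Jackson is merely assumed to be on the classpath), this shows where the captured value sits in a CrossRef record before the crossref-integration.xml mapping added here exposes it as dc.publisher:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CrossRefPublisherSketch {
    public static void main(String[] args) throws Exception {
        // Trimmed, hypothetical CrossRef /works payload; real responses carry many more fields.
        String json = "{ \"message\": { \"publisher\": \"Petro Mohyla Black Sea National University\" } }";
        JsonNode message = new ObjectMapper().readTree(json).path("message");
        // This is the value the import is expected to surface as dc.publisher (see the ITs below).
        System.out.println("dc.publisher = " + message.path("publisher").asText());
    }
}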
See: https://github.com/CrossRef/rest-api-doc/blob/master/api_format.md --- .../app/rest/CrossRefImportMetadataSourceServiceIT.java | 8 +++++++- dspace/config/spring/api/crossref-integration.xml | 9 +++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java index 11fe58ac1d2e..72524709ec65 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java @@ -153,6 +153,8 @@ private ArrayList getRecords() { MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums.add(title); metadatums.add(author); @@ -163,6 +165,7 @@ private ArrayList getRecords() { metadatums.add(issn); metadatums.add(volume); metadatums.add(issue); + metadatums.add(publisher); ImportRecord firstrRecord = new ImportRecord(metadatums); @@ -179,6 +182,8 @@ private ArrayList getRecords() { MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher2 = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums2.add(title2); metadatums2.add(author2); @@ -189,6 +194,7 @@ private ArrayList getRecords() { metadatums2.add(issn2); metadatums2.add(volume2); metadatums2.add(issue2); + metadatums2.add(publisher2); ImportRecord secondRecord = new ImportRecord(metadatums2); records.add(firstrRecord); @@ -196,4 +202,4 @@ private ArrayList getRecords() { return records; } -} \ No newline at end of file +} diff --git a/dspace/config/spring/api/crossref-integration.xml b/dspace/config/spring/api/crossref-integration.xml index 5d67c17626e2..d1e416d2b0c6 100644 --- a/dspace/config/spring/api/crossref-integration.xml +++ b/dspace/config/spring/api/crossref-integration.xml @@ -30,6 +30,7 @@ + @@ -137,6 +138,14 @@ + + + + + + + + From 9a49998ea0bf8955ebb3063ce11e2f22a2bc06eb Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Mon, 5 Jun 2023 16:02:42 +0200 Subject: [PATCH 269/686] 102124: Add config for opening formats as attachment + IT --- .../test/data/dspaceFolder/config/local.cfg | 6 +++ .../app/rest/BitstreamRestController.java | 17 ++++++- .../app/rest/BitstreamRestControllerIT.java | 48 +++++++++++++++++++ dspace/config/dspace.cfg | 5 ++ 4 files changed, 75 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 3c19a68e9fd1..144116f212eb 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -144,3 +144,9 @@ authentication-ip.Student = 6.6.6.6 useProxies = true proxies.trusted.ipranges = 7.7.7.7 proxies.trusted.include_ui_ip = true + +########################### +# BITSTREAM CONFIGURATION # +########################### +# Files with these mimetypes, extensions 
will never be opened inline, and should be downloaded +webui.content_disposition_format = text/richtext, text/xml, txt diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java index 75d3c9886cf1..4e78d5dae706 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java @@ -156,8 +156,11 @@ public ResponseEntity retrieve(@PathVariable UUID uuid, HttpServletResponse resp } //Determine if we need to send the file as a download or if the browser can open it inline + //The file will be downloaded if its size is larger than the configured threshold, + //or if its mimetype/extension appears in the "webui.content_disposition_format" config long dispositionThreshold = configurationService.getLongProperty("webui.content_disposition_threshold"); - if (dispositionThreshold >= 0 && filesize > dispositionThreshold) { + if ((dispositionThreshold >= 0 && filesize > dispositionThreshold) + || checkFormatForContentDisposition(format)) { httpHeadersInitializer.withDisposition(HttpHeadersInitializer.CONTENT_DISPOSITION_ATTACHMENT); } @@ -204,6 +207,18 @@ private boolean isNotAnErrorResponse(HttpServletResponse response) { || responseCode.equals(Response.Status.Family.REDIRECTION); } + private boolean checkFormatForContentDisposition(BitstreamFormat format) { + List formats = List.of((configurationService.getArrayProperty("webui.content_disposition_format"))); + boolean download = formats.contains(format.getMIMEType()); + for (String ext : format.getExtensions()) { + if (formats.contains(ext)) { + download = true; + break; + } + } + return download; + } + /** * This method will update the bitstream format of the bitstream that corresponds to the provided bitstream uuid. 
* diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index f07aae876f32..9b4aeed1e208 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -22,6 +22,7 @@ import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST_VALUE; @@ -968,4 +969,51 @@ public void updateBitstreamFormatAdmin() throws Exception { bitstreamService.getMetadataByMetadataString(bitstream, "dc.format") )); } + + @Test + public void checkContentDispositionOfFormats() throws Exception { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).build(); + String content = "Test Content"; + Bitstream rtf; + Bitstream xml; + Bitstream txt; + Bitstream html; + try (InputStream is = IOUtils.toInputStream(content, CharEncoding.UTF_8)) { + rtf = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/richtext").build(); + xml = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/xml").build(); + txt = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/plain").build(); + html = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/html").build(); + } + context.restoreAuthSystemState(); + + // these formats are configured and files should be downloaded + verifyBitstreamDownload(rtf, "text/richtext", true); + verifyBitstreamDownload(xml, "text/xml", true); + verifyBitstreamDownload(txt, "text/plain", true); + // this format is not configured and should open inline + verifyBitstreamDownload(html, "text/html", false); + } + + private void verifyBitstreamDownload(Bitstream file, String contentType, boolean shouldDownload) throws Exception { + String token = getAuthToken(admin.getEmail(), password); + String header = getClient(token).perform(get("/api/core/bitstreams/" + file.getID() + "/content") + .header("Accept", contentType)) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andReturn().getResponse().getHeader("content-disposition"); + if (shouldDownload) { + assertTrue(header.contains("attachment")); + assertFalse(header.contains("inline")); + } else { + assertTrue(header.contains("inline")); + assertFalse(header.contains("attachment")); + } + } } diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 02c618abf420..d81346012164 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1286,6 +1286,11 @@ websvc.opensearch.formats = atom,rss # Use -1 to force all bitstream to be served inline webui.content_disposition_threshold = 8388608 +#### Content Attachment Disposition Formats #### +# +# Set which mimetypes, file extensions will NOT be opened inline +# Files with these mimetypes/extensions will always be downloaded +webui.content_disposition_format = text/richtext #### Multi-file HTML document/site settings ##### # TODO: 
UNSUPPORTED in DSpace 7.0. May be re-added in a later release From a0a1844de7c6aa4fdc5a743b402e1c056c57ace5 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 5 Jun 2023 16:01:31 -0500 Subject: [PATCH 270/686] Fix test class compilation --- .../src/test/java/org/dspace/discovery/DiscoveryIT.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 164525afb11f..55be531418ae 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -7,6 +7,7 @@ */ package org.dspace.discovery; +import static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -746,8 +747,8 @@ public void iteratorSearchServiceTest() throws SearchServiceException { */ @Test public void searchWithDefaultSortServiceTest() throws SearchServiceException { - - DiscoveryConfiguration workspaceConf = SearchUtils.getDiscoveryConfiguration("workspace", null); + DiscoveryConfiguration workspaceConf = + SearchUtils.getDiscoveryConfiguration(context, DISCOVER_WORKSPACE_CONFIGURATION_NAME, null); // Skip if no default sort option set for workspaceConf if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) { return; From 3945d164e645d12955364cfc6f6128286a2b3201 Mon Sep 17 00:00:00 2001 From: Kevin Van de Velde Date: Tue, 6 Jun 2023 08:35:18 +0200 Subject: [PATCH 271/686] [Issue: #8888] Fixing the hostname parsing for the shibboleth auth --- dspace-api/src/main/java/org/dspace/core/Utils.java | 8 +++----- dspace-api/src/test/java/org/dspace/core/UtilsTest.java | 6 ++++++ 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/Utils.java b/dspace-api/src/main/java/org/dspace/core/Utils.java index b9fff20c7674..6a487bfcf2d6 100644 --- a/dspace-api/src/main/java/org/dspace/core/Utils.java +++ b/dspace-api/src/main/java/org/dspace/core/Utils.java @@ -16,8 +16,6 @@ import java.net.Inet4Address; import java.net.InetAddress; import java.net.MalformedURLException; -import java.net.URI; -import java.net.URISyntaxException; import java.net.URL; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; @@ -447,14 +445,14 @@ public static String getBaseUrl(String urlString) { */ public static String getHostName(String uriString) { try { - URI uri = new URI(uriString); - String hostname = uri.getHost(); + URL url = new URL(uriString); + String hostname = url.getHost(); // remove the "www." from hostname, if it exists if (hostname != null) { return hostname.startsWith("www.") ? 
hostname.substring(4) : hostname; } return null; - } catch (URISyntaxException e) { + } catch (MalformedURLException e) { return null; } } diff --git a/dspace-api/src/test/java/org/dspace/core/UtilsTest.java b/dspace-api/src/test/java/org/dspace/core/UtilsTest.java index 920fa69d6d31..291561ac2536 100644 --- a/dspace-api/src/test/java/org/dspace/core/UtilsTest.java +++ b/dspace-api/src/test/java/org/dspace/core/UtilsTest.java @@ -75,6 +75,12 @@ public void testGetHostName() { assertEquals("Test keep other prefixes", "demo.dspace.org", Utils.getHostName("https://demo.dspace.org")); + assertEquals("Test with parameter", "demo.dspace.org", + Utils.getHostName("https://demo.dspace.org/search?query=test")); + + assertEquals("Test with parameter with space", "demo.dspace.org", + Utils.getHostName("https://demo.dspace.org/search?query=test turbine")); + // This uses a bunch of reserved URI characters assertNull("Test invalid URI returns null", Utils.getHostName("&+,?/@=")); } From 04161a0c4d33cb4c2ab9b64699127a34abc7287e Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Tue, 6 Jun 2023 11:24:10 +0200 Subject: [PATCH 272/686] 102124: Fix BitstreamRestController after merge --- .../org/dspace/app/rest/BitstreamRestControllerIT.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index ca4cf2538a3c..57360a97d364 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -1261,11 +1261,11 @@ public void checkContentDispositionOfFormats() throws Exception { context.restoreAuthSystemState(); // these formats are configured and files should be downloaded - verifyBitstreamDownload(rtf, "text/richtext", true); - verifyBitstreamDownload(xml, "text/xml", true); - verifyBitstreamDownload(txt, "text/plain", true); + verifyBitstreamDownload(rtf, "text/richtext;charset=UTF-8", true); + verifyBitstreamDownload(xml, "text/xml;charset=UTF-8", true); + verifyBitstreamDownload(txt, "text/plain;charset=UTF-8", true); // this format is not configured and should open inline - verifyBitstreamDownload(html, "text/html", false); + verifyBitstreamDownload(html, "text/html;charset=UTF-8", false); } private void verifyBitstreamDownload(Bitstream file, String contentType, boolean shouldDownload) throws Exception { From 14bb32036c2195802a7db7d790e5994244618062 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Jun 2023 16:01:04 +0200 Subject: [PATCH 273/686] DURACOM-136 add explanation of the commandLineParameters in the javadoc --- .../org/dspace/scripts/configuration/ScriptConfiguration.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index e22063eb4954..642409a924f7 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -87,6 +87,8 @@ public void setName(String name) { * to the currentUser in the context being an admin, however this can be overwritten by each script individually * if different rules apply * @param context The relevant DSpace context + * @param commandLineParameters
the parameters that will be used to start the process if known, + * null otherwise * @return A boolean indicating whether the script is allowed to execute or not */ public boolean isAllowedToExecute(Context context, List commandLineParameters) { From 2b523ba5ac1cbaeaf8bccd9f5b575e7564e14aae Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Jun 2023 16:01:47 +0200 Subject: [PATCH 274/686] DURACOM-136 improve handling and testing of invalid mimetype --- .../app/rest/ScriptProcessesController.java | 18 ++++++++++++++++-- .../app/rest/ScriptRestRepositoryIT.java | 14 +++++++++++++- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java index 3aeec9535be5..70149bbb6b0c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java @@ -19,9 +19,12 @@ import org.dspace.app.rest.repository.ScriptRestRepository; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.service.ScriptService; import org.dspace.services.RequestService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; @@ -50,6 +53,9 @@ public class ScriptProcessesController { @Autowired private ScriptRestRepository scriptRestRepository; + @Autowired + private ScriptService scriptService; + @Autowired private RequestService requestService; @@ -80,9 +86,17 @@ public ResponseEntity> startProcess( @RequestMapping(method = RequestMethod.POST, consumes = "!" 
+ MediaType.MULTIPART_FORM_DATA_VALUE) @PreAuthorize("hasAuthority('AUTHENTICATED')") public ResponseEntity> startProcessInvalidMimeType( - @PathVariable(name = "name") String scriptName, - @RequestParam(name = "file", required = false) List files) + @PathVariable(name = "name") String scriptName) throws Exception { + if (log.isTraceEnabled()) { + log.trace("Starting Process for Script with name: " + scriptName); + } + Context context = ContextUtil.obtainContext(requestService.getCurrentRequest().getHttpServletRequest()); + ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); + + if (scriptToExecute == null) { + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); + } throw new DSpaceBadRequestException("Invalid mimetype"); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 16e691ef6f95..29a0076d0c85 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -134,6 +134,13 @@ public void findAllScriptsGenericLoggedInUserTest() throws Exception { .andExpect(jsonPath("$.page.totalElements", is(0))); } + @Test + public void findAllScriptsAnonymousUserTest() throws Exception { + getClient().perform(get("/api/system/scripts")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(0))); + } + @Test public void findAllScriptsLocalAdminsTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -543,8 +550,13 @@ public void postProcessAndVerifyOutput() throws Exception { public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception { String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(post("/api/system/scripts/mock-script/processes")) + .andExpect(status().isBadRequest()); + getClient(token).perform(post("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test From f2a63d9636907b34e572003d8162a2ee6e2aa631 Mon Sep 17 00:00:00 2001 From: Mohamed Saber Eskander Date: Tue, 6 Jun 2023 17:21:20 +0300 Subject: [PATCH 275/686] added new test cases added JavaDocs refactoring --- .../bulkaccesscontrol/BulkAccessControl.java | 150 +++++++++++++++++- ...lkAccessControlCliScriptConfiguration.java | 2 +- .../BulkAccessControlIT.java | 92 +++++++++++ .../BulkAccessConditionRestRepository.java | 5 +- 4 files changed, 243 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index bef5a15e03f1..c85ebf65f214 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -48,6 +48,7 @@ import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.Bitstream; +import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; @@ -183,11 +184,22 @@ public void internalRun() throws Exception { updateItemsAndBitstreamsPolices(accessControl); context.complete(); } 
catch (Exception e) { + e.printStackTrace(); handler.handleException(e); context.abort(); } } + /** + * check the validation of mapped json data, it must + * provide item or bitstream information or both of them + * and check the validation of item node if provided, + * and check the validation of bitstream node if provided. + * + * @param accessControl mapped json data + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if accessControl is invalid + */ private void validate(BulkAccessControlInput accessControl) throws SQLException { AccessConditionItem item = accessControl.getItem(); @@ -207,6 +219,16 @@ private void validate(BulkAccessControlInput accessControl) throws SQLException } } + /** + * check the validation of item node, the item mode + * must be provided with value 'add' or 'replace' + * if mode equals to add so the information + * of accessCondition must be provided, + * also checking that accessConditions information are valid. + * + * @param item the item node + * @throws BulkAccessControlException if item node is invalid + */ private void validateItemNode(AccessConditionItem item) { String mode = item.getMode(); List accessConditions = item.getAccessConditions(); @@ -228,6 +250,18 @@ private void validateItemNode(AccessConditionItem item) { } } + /** + * check the validation of bitstream node, the bitstream mode + * must be provided with value 'add' or 'replace' + * if mode equals to add so the information of accessConditions + * must be provided, + * also checking that constraint information is valid, + * also checking that accessConditions information are valid. + * + * @param bitstream the bitstream node + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if bitstream node is invalid + */ private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQLException { String mode = bitstream.getMode(); List accessConditions = bitstream.getAccessConditions(); @@ -251,6 +285,15 @@ private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQ } } + /** + * check the validation of constraint node if provided, + * constraint isn't supported when multiple uuids are provided + * or when uuid isn't an Item + * + * @param bitstream the bitstream node + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if constraint node is invalid + */ private void validateConstraint(AccessConditionBitstream bitstream) throws SQLException { if (uuids.size() > 1 && containsConstraints(bitstream)) { handler.logError("constraint isn't supported when multiple uuids are provided"); @@ -266,6 +309,15 @@ private void validateConstraint(AccessConditionBitstream bitstream) throws SQLEx } } + /** + * check the validation of access condition, + * the access condition name must equal to one of configured access conditions, + * then call {@link AccessConditionOption#validateResourcePolicy( + * Context, String, Date, Date)} if exception happens so, it's invalid. 
+ * + * @param accessCondition the accessCondition + * @throws BulkAccessControlException if the accessCondition is invalid + */ private void validateAccessCondition(AccessCondition accessCondition) { if (!itemAccessConditions.containsKey(accessCondition.getName())) { @@ -282,7 +334,18 @@ private void validateAccessCondition(AccessCondition accessCondition) { } } - public void updateItemsAndBitstreamsPolices(BulkAccessControlInput accessControl) + /** + * find all items of provided {@link #uuids} from solr, + * then update the resource policies of items + * or bitstreams of items (only bitstreams of ORIGINAL bundles) + * and derivative bitstreams, or both of them. + * + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws SearchServiceException if a search error occurs + * @throws AuthorizeException if an authorization error occurs + */ + private void updateItemsAndBitstreamsPolices(BulkAccessControlInput accessControl) throws SQLException, SearchServiceException, AuthorizeException { int counter = 0; @@ -358,6 +421,17 @@ private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) { return discoverQuery; } + /** + * update the item resource policies, + * when mode equals to 'replace' will remove + * all current resource polices of types 'TYPE_CUSTOM' + * and 'TYPE_INHERITED' then, set the new resource policies. + * + * @param item the item + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ private void updateItemPolicies(Item item, BulkAccessControlInput accessControl) throws SQLException, AuthorizeException { @@ -372,6 +446,16 @@ private void updateItemPolicies(Item item, BulkAccessControlInput accessControl) logInfo(acItem.getAccessConditions(), acItem.getMode(), item); } + /** + * create the new resource policies of item. + * then, call {@link ItemService#adjustItemPolicies( + * Context, Item, Collection)} to adjust item's default policies. + * + * @param item the item + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ private void setItemPolicies(Item item, BulkAccessControlInput accessControl) throws SQLException, AuthorizeException { @@ -384,6 +468,16 @@ private void setItemPolicies(Item item, BulkAccessControlInput accessControl) itemService.adjustItemPolicies(context, item, item.getOwningCollection()); } + /** + * update the resource policies of all item's bitstreams + * or bitstreams specified into constraint node, + * and derivative bitstreams. + * + * NOTE: only bitstreams of ORIGINAL bundles + * + * @param item the item contains bitstreams + * @param accessControl the access control input + */ private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessControl) { AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints(); @@ -396,12 +490,33 @@ private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessCo .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); } + /** + * check that the bitstream node is existed, + * and contains constraint node, + * and constraint contains uuids. 
+ * + * @param bitstream the bitstream node + * @return true when uuids of constraint of bitstream is not empty, + * otherwise false + */ private boolean containsConstraints(AccessConditionBitstream bitstream) { return Objects.nonNull(bitstream) && Objects.nonNull(bitstream.getConstraints()) && isNotEmpty(bitstream.getConstraints().getUuid()); } + /** + * update the bitstream resource policies, + * when mode equals to replace will remove + * all current resource polices of types 'TYPE_CUSTOM' + * and 'TYPE_INHERITED' then, set the new resource policies. + * + * @param bitstream the bitstream + * @param item the item of bitstream + * @param accessControl the access control input + * @throws RuntimeException if something goes wrong in the database + * or an authorization error occurs + */ private void updateBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) { AccessConditionBitstream acBitstream = accessControl.getBitstream(); @@ -420,6 +535,14 @@ private void updateBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessC } + /** + * remove dspace object's read policies. + * + * @param dso the dspace object + * @param type resource policy type + * @throws BulkAccessControlException if something goes wrong + * in the database or an authorization error occurs + */ private void removeReadPolicies(DSpaceObject dso, String type) { try { resourcePolicyService.removePolicies(context, dso, type, Constants.READ); @@ -428,6 +551,18 @@ private void removeReadPolicies(DSpaceObject dso, String type) { } } + /** + * create the new resource policies of bitstream. + * then, call {@link ItemService#adjustItemPolicies( + * Context, Item, Collection)} to adjust bitstream's default policies. + * and also update the resource policies of its derivative bitstreams. + * + * @param bitstream the bitstream + * @param item the item of bitstream + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ private void setBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) throws SQLException, AuthorizeException { @@ -440,8 +575,17 @@ private void setBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessCont mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, bitstream); } + /** + * create the resource policy from the information + * comes from the access condition. 
+ * + * @param obj the dspace object + * @param accessCondition the access condition + * @param accessConditionOption the access condition option + * @throws BulkAccessControlException if an exception occurs + */ private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition, - AccessConditionOption AccessConditionOption) { + AccessConditionOption accessConditionOption) { String name = accessCondition.getName(); String description = accessCondition.getDescription(); @@ -449,7 +593,7 @@ private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondit Date endDate = accessCondition.getEndDate(); try { - AccessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate); + accessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate); } catch (Exception e) { throw new BulkAccessControlException(e); } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java index e329d8894a37..951c93db3030 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java @@ -32,7 +32,7 @@ public Options getOptions() { options.getOption("f").setType(InputStream.class); options.getOption("f").setRequired(true); - options.addOption("e", "eperson", true, "email of eperson doing importing"); + options.addOption("e", "eperson", true, "email of EPerson used to perform actions"); options.getOption("e").setRequired(true); options.addOption("h", "help", false, "help"); diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java index 6335ecd417f7..02fe4a0bb597 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -404,6 +404,98 @@ public void performBulkAccessWithNotCollectionAdminEPersonTest() throws Exceptio )); } + @Test + public void performBulkAccessWithNotCommunityAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // add eperson to admin group + Community communityOne = CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + + Community communityTwo = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", + "-u", communityOne.getID().toString(), + "-u", communityTwo.getID().toString(), + "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + 
containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithNotItemAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + // add eperson to admin group + Item itemOne = ItemBuilder.createItem(context, collection) + .withAdminUser(eperson) + .build(); + + Item itemTwo = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", + "-u", itemOne.getID().toString(), + "-u", itemTwo.getID().toString(), + "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + @Test public void performBulkAccessWithoutRequiredParamTest() throws Exception { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BulkAccessConditionRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BulkAccessConditionRestRepository.java index 8bf0c93c93d7..2bf25978efc4 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BulkAccessConditionRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BulkAccessConditionRestRepository.java @@ -74,8 +74,9 @@ public Class getDomainClass() { private boolean isAuthorized(Context context) { try { - return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) || - authorizeService.isItemAdmin(context); + return context.getCurrentUser() != null && + (authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) || + authorizeService.isItemAdmin(context)); } catch (SQLException e) { throw new RuntimeException(e); } From 32cd24b7538694ec964289bb13027d09ae06b829 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Jun 2023 19:45:31 +0200 Subject: [PATCH 276/686] DURACOM-136 restrict script endpoints to authenticated users, add test to proof that standard script are reseved to site administrator --- .../rest/repository/ScriptRestRepository.java | 4 +- .../app/rest/ScriptRestRepositoryIT.java | 42 +++++++++++++++++-- 2 files changed, 41 insertions(+), 5 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java index 2fc996a327cc..09d65590b6f3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java @@ -59,7 +59,7 @@ public class 
ScriptRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) { List scriptConfigurations = scriptService.getScriptConfigurations(context); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 29a0076d0c85..42c9f2c9f7b4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -136,9 +136,9 @@ public void findAllScriptsGenericLoggedInUserTest() throws Exception { @Test public void findAllScriptsAnonymousUserTest() throws Exception { + // this should be changed once we allow anonymous user to execute some scripts getClient().perform(get("/api/system/scripts")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.page.totalElements", is(0))); + .andExpect(status().isUnauthorized()); } @Test @@ -358,12 +358,48 @@ public void findOneScriptByInvalidNameBadRequestExceptionTest() throws Exception .andExpect(status().isNotFound()); } + /** + * This test will create a basic structure of communities, collections and items with some local admins at each + * level and verify that the local admins, nor generic users can run scripts reserved to administrator + * (i.e. default one that don't override the default + * {@link ScriptConfiguration#isAllowedToExecute(org.dspace.core.Context, List)} method implementation + */ @Test public void postProcessNonAdminAuthorizeException() throws Exception { - String token = getAuthToken(eperson.getEmail(), password); + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + String comAdmin_token = getAuthToken(eperson.getEmail(), password); + String colAdmin_token = getAuthToken(eperson.getEmail(), password); + String itemAdmin_token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script/processes")) .andExpect(status().isForbidden()); + getClient(comAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(colAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(itemAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); } @Test From 5a8c7a397c342c5f931b209395965bbc365dfa3f Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Jun 2023 20:12:23 +0200 Subject: [PATCH 277/686] DURACOM-136 open endpoints to retrieve files of process to the user that 
has triggered the process --- .../ProcessFileTypesLinkRepository.java | 2 +- .../ProcessFilesLinkRepository.java | 2 +- .../ProcessOutputLinkRepository.java | 2 +- .../app/rest/ProcessRestRepositoryIT.java | 81 ++++++++++++++----- 4 files changed, 66 insertions(+), 21 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java index 8eb8d7ef652a..16c8115b29f8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java @@ -47,7 +47,7 @@ public class ProcessFileTypesLinkRepository extends AbstractDSpaceRestRepository * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ - @PreAuthorize("hasAuthority('ADMIN')") + @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')") public ProcessFileTypesRest getFileTypesFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFilesLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFilesLinkRepository.java index 42fcef0d628c..5d8251cf19ba 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFilesLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFilesLinkRepository.java @@ -47,7 +47,7 @@ public class ProcessFilesLinkRepository extends AbstractDSpaceRestRepository imp * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ - @PreAuthorize("hasAuthority('ADMIN')") + @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')") public Page getFilesFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java index f9f665d14fd1..f5b3edced2db 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java @@ -50,7 +50,7 @@ public class ProcessOutputLinkRepository extends AbstractDSpaceRestRepository im * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ - @PreAuthorize("hasAuthority('ADMIN')") + @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')") public BitstreamRest getOutputFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java index d76e20b23d19..670d8e2f35b0 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java @@ -222,22 +222,35 @@ public void getAllProcessesTestStartingUser() throws Exception { @Test public void getProcessFiles() throws Exception { + 
context.setCurrentUser(eperson); Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); - try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - + getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + getClient(epersonToken) + .perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); } @Test @@ -245,25 +258,34 @@ public void getProcessFilesByFileType() throws Exception { Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files/inputfile")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) 
.andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - } @Test public void getProcessFilesTypes() throws Exception { + Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } List fileTypesToCheck = new LinkedList<>(); @@ -271,12 +293,18 @@ public void getProcessFilesTypes() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/filetypes")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) .andExpect(status().isOk()) .andExpect(jsonPath("$", ProcessFileTypesMatcher - .matchProcessFileTypes("filetypes-" + process.getID(), fileTypesToCheck))); - + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ProcessFileTypesMatcher + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); } @Test @@ -811,25 +839,42 @@ public void testFindByCurrentUser() throws Exception { @Test public void getProcessOutput() throws Exception { + context.setCurrentUser(eperson); + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendLog(process.getID(), process.getName(), "testlog", ProcessLogLevel.INFO); + processService.appendLog(process1.getID(), process1.getName(), "testlog", ProcessLogLevel.INFO); } - processService.createLogBitstream(context, process); + processService.createLogBitstream(context, process1); List fileTypesToCheck = new LinkedList<>(); fileTypesToCheck.add("inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/output")) + getClient(token).perform(get("/api/system/processes/" + process1.getID() + "/output")) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.type", is("bitstream"))) .andExpect(jsonPath("$.metadata['dc.title'][0].value", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", is("script_output"))); + String epersonToken = getAuthToken(eperson.getEmail(), password); + + getClient(epersonToken) + .perform(get("/api/system/processes/" + process1.getID() + "/output")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.name", + is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.type", is("bitstream"))) + .andExpect(jsonPath("$.metadata['dc.title'][0].value", + is(process1.getName() + process1.getID() + ".log"))) + 
.andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", + is("script_output"))); } } From bca98cd8f4e2ffda872a30341f0c951f509d2eb5 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Wed, 7 Jun 2023 08:33:51 +0200 Subject: [PATCH 278/686] 102124: Don't loop over extensions if mimetype was already found --- .../org/dspace/app/rest/BitstreamRestController.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java index 4e78d5dae706..a81d62b60148 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java @@ -210,10 +210,12 @@ private boolean isNotAnErrorResponse(HttpServletResponse response) { private boolean checkFormatForContentDisposition(BitstreamFormat format) { List formats = List.of((configurationService.getArrayProperty("webui.content_disposition_format"))); boolean download = formats.contains(format.getMIMEType()); - for (String ext : format.getExtensions()) { - if (formats.contains(ext)) { - download = true; - break; + if (!download) { + for (String ext : format.getExtensions()) { + if (formats.contains(ext)) { + download = true; + break; + } } } return download; From 11df3b8d632f7a917cd3912c72a32fce18848bc2 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Wed, 7 Jun 2023 08:46:22 +0200 Subject: [PATCH 279/686] [CST-5728] LYRASIS [Donated]: DSpace7: Implement signposting pattern in DSpace (REST) --- .../controller/LinksetRestController.java | 6 +- .../model/MetadataConfiguration.java | 42 +++++++++ .../processor/item/ItemAuthorProcessor.java | 48 +++------- .../item/ItemIdentifierProcessor.java | 38 ++++++-- .../controller/LinksetRestControllerIT.java | 90 +++++-------------- dspace/config/spring/rest/signposting.xml | 40 ++++++--- 6 files changed, 140 insertions(+), 124 deletions(-) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java index e828820c0c5f..7a1271454302 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -76,7 +76,7 @@ public ResponseEntity getOne() { return ResponseEntity.status(HttpStatus.METHOD_NOT_ALLOWED).build(); } - @PreAuthorize("permitAll()") + @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/json", method = RequestMethod.GET, produces = "application/linkset+json") public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) { @@ -102,7 +102,7 @@ public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) } } - @PreAuthorize("permitAll()") + @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET, produces = "application/linkset") public LinksetRest getLset(HttpServletRequest request, @PathVariable UUID uuid) { @@ -135,7 +135,7 @@ public 
LinksetRest getLset(HttpServletRequest request, @PathVariable UUID uuid) } } - @PreAuthorize("permitAll()") + @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ') && hasPermission(#uuid, 'BITSTREAM', 'READ')") @RequestMapping(value = "/links" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) public List getHeader(HttpServletRequest request, @PathVariable UUID uuid) { try { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java new file mode 100644 index 000000000000..99dabd003f94 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.model; + +/** + * Represents metadata handle configuration. + */ +public class MetadataConfiguration { + + private String metadataField; + + private String pattern; + + public MetadataConfiguration() { + } + + public MetadataConfiguration(String metadataField, String pattern) { + this.metadataField = metadataField; + this.pattern = pattern; + } + + public String getMetadataField() { + return metadataField; + } + + public void setMetadataField(String metadataField) { + this.metadataField = metadataField; + } + + public String getPattern() { + return pattern; + } + + public void setPattern(String pattern) { + this.pattern = pattern; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java index b935e50bfad8..e2dc26b82785 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java @@ -7,24 +7,22 @@ */ package org.dspace.app.rest.signposting.processor.item; +import static java.util.Objects.nonNull; +import static org.apache.commons.lang.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.isBlank; import static org.dspace.content.Item.ANY; import java.text.MessageFormat; import java.util.List; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; -import org.dspace.content.EntityType; import org.dspace.content.Item; import org.dspace.content.MetadataFieldName; -import org.dspace.content.Relationship; -import org.dspace.content.RelationshipType; -import org.dspace.content.service.EntityTypeService; +import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.service.ItemService; -import org.dspace.content.service.RelationshipService; import org.dspace.core.Context; import org.dspace.util.FrontendUrlService; @@ -36,8 +34,6 @@ */ public class ItemAuthorProcessor extends ItemSignpostingProcessor { - private static final String IS_AUTHOR_OF = "isAuthorOf"; - /** * log4j category */ @@ -45,20 +41,12 @@ public class ItemAuthorProcessor extends ItemSignpostingProcessor { private final 
ItemService itemService; - private final RelationshipService relationshipService; - - private final EntityTypeService entityTypeService; - private String orcidMetadata; public ItemAuthorProcessor(FrontendUrlService frontendUrlService, - ItemService itemService, - RelationshipService relationshipService, - EntityTypeService entityTypeService) { + ItemService itemService) { super(frontendUrlService); this.itemService = itemService; - this.relationshipService = relationshipService; - this.entityTypeService = entityTypeService; setRelation(LinksetRelationType.AUTHOR); } @@ -74,26 +62,16 @@ public void setOrcidMetadata(String orcidMetadata) { public void addLinkSetNodes(Context context, HttpServletRequest request, Item item, List linksetNodes) { try { - EntityType personType = entityTypeService.findByEntityType(context, "Author"); - List itemRelationships = relationshipService.findByItem(context, item); - for (Relationship relationship : itemRelationships) { - - RelationshipType relationshipType = relationship.getRelationshipType(); - boolean hasPersonType = relationshipType.getLeftType().equals(personType) - || relationshipType.getRightType().equals(personType); - boolean isAuthor = relationshipType.getLeftwardType().startsWith(IS_AUTHOR_OF) - || relationshipType.getRightwardType().startsWith(IS_AUTHOR_OF); - - if (hasPersonType && isAuthor) { - Item authorItem = relationship.getLeftItem().getID().equals(item.getID()) - ? relationship.getRightItem() - : relationship.getLeftItem(); - + String authorId = itemService.getMetadataFirstValue(item, MetadataSchemaEnum.RELATION.getName(), + "isAuthorOfPublication", null, ANY); + if (isNotBlank(authorId)) { + Item author = itemService.findByIdOrLegacyId(context, authorId); + if (nonNull(author)) { String authorOrcid = itemService.getMetadataFirstValue( - authorItem, new MetadataFieldName(getOrcidMetadata()), ANY + author, new MetadataFieldName(getOrcidMetadata()), ANY ); - if (StringUtils.isNotBlank(authorOrcid)) { - String authorLink = StringUtils.isBlank(getPattern()) + if (isNotBlank(authorOrcid)) { + String authorLink = isBlank(getPattern()) ? 
authorOrcid : MessageFormat.format(getPattern(), authorOrcid); linksetNodes.add( diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java index 01151fd7d37c..79a820d702d8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java @@ -7,14 +7,19 @@ */ package org.dspace.app.rest.signposting.processor.item; +import static java.util.Objects.isNull; +import static java.util.Objects.nonNull; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + import java.text.MessageFormat; +import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.app.rest.signposting.model.MetadataConfiguration; import org.dspace.content.Item; import org.dspace.content.MetadataValue; import org.dspace.content.service.ItemService; @@ -35,6 +40,8 @@ public class ItemIdentifierProcessor extends ItemSignpostingProcessor { */ private static final Logger log = Logger.getLogger(ItemIdentifierProcessor.class); + private List metadataConfigurations; + private final ItemService itemService; public ItemIdentifierProcessor(FrontendUrlService frontendUrlService, ItemService itemService) { @@ -46,14 +53,23 @@ public ItemIdentifierProcessor(FrontendUrlService frontendUrlService, ItemServic @Override public void addLinkSetNodes(Context context, HttpServletRequest request, Item item, List linksetNodes) { + getMetadataConfigurations() + .forEach(metadataHandle -> handleMetadata(context, item, linksetNodes, metadataHandle)); + } + + private void handleMetadata(Context context, + Item item, + List linksetNodes, + MetadataConfiguration metadataConfiguration) { try { - List identifiers = itemService.getMetadataByMetadataString(item, getMetadataField()); + List identifiers = itemService + .getMetadataByMetadataString(item, metadataConfiguration.getMetadataField()); for (MetadataValue identifier : identifiers) { - if (identifier != null) { + if (nonNull(identifier)) { String identifierValue = identifier.getValue(); - if (StringUtils.isNotBlank(identifierValue)) { - if (StringUtils.isNotBlank(getPattern())) { - identifierValue = MessageFormat.format(getPattern(), identifierValue); + if (isNotBlank(identifierValue)) { + if (isNotBlank(metadataConfiguration.getPattern())) { + identifierValue = MessageFormat.format(metadataConfiguration.getPattern(), identifierValue); } linksetNodes.add(new LinksetNode(identifierValue, getRelation(), buildAnchor(context, item))); } @@ -64,4 +80,14 @@ public void addLinkSetNodes(Context context, HttpServletRequest request, } } + public List getMetadataConfigurations() { + if (isNull(metadataConfigurations)) { + metadataConfigurations = new ArrayList<>(); + } + return metadataConfigurations; + } + + public void setMetadataConfigurations(List metadataConfigurations) { + this.metadataConfigurations = metadataConfigurations; + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index 39a7ef664ba6..180ad3672170 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -15,7 +15,6 @@ import java.io.InputStream; import java.text.MessageFormat; -import java.util.List; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; @@ -34,7 +33,6 @@ import org.dspace.content.EntityType; import org.dspace.content.Item; import org.dspace.content.MetadataSchemaEnum; -import org.dspace.content.MetadataValue; import org.dspace.content.RelationshipType; import org.dspace.content.WorkspaceItem; import org.dspace.content.authority.Choices; @@ -57,7 +55,7 @@ public class LinksetRestControllerIT extends AbstractControllerIntegrationTest { private static final String doiPattern = "https://doi.org/{0}"; private static final String orcidPattern = "http://orcid.org/{0}"; private static final String doi = "10.1007/978-3-642-35233-1_18"; - private static final String AUTHOR = "Author"; + private static final String PERSON_ENTITY_TYPE = "Person"; private Collection collection; @@ -247,11 +245,7 @@ public void findOneItemThatIsInWorkspaceJsonLinksets() throws Exception { context.restoreAuthSystemState(); getClient().perform(get("/signposting/linksets/" + workspaceItem.getItem().getID() + "/json")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.linkset", - Matchers.hasSize(1))) - .andExpect(jsonPath("$.linkset[0].cite-as[0].href", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + .andExpect(status().isUnauthorized()); } @Test @@ -265,11 +259,7 @@ public void findOneWithdrawnItemJsonLinksets() throws Exception { context.restoreAuthSystemState(); getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.linkset", - Matchers.hasSize(1))) - .andExpect(jsonPath("$.linkset[0].cite-as[0].href", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + .andExpect(status().isUnauthorized()); } @Test @@ -284,11 +274,7 @@ public void findOneEmbargoItemJsonLinksets() throws Exception { context.restoreAuthSystemState(); getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.linkset", - Matchers.hasSize(1))) - .andExpect(jsonPath("$.linkset[0].cite-as[0].href", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + .andExpect(status().isUnauthorized()); } @Test @@ -305,11 +291,7 @@ public void findOneRestrictedItemJsonLinksets() throws Exception { context.restoreAuthSystemState(); getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.linkset", - Matchers.hasSize(1))) - .andExpect(jsonPath("$.linkset[0].cite-as[0].href", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + .andExpect(status().isUnauthorized()); } @Test @@ -390,20 +372,22 @@ public void findOneItemLsetLinksets() throws Exception { } @Test - public void findTypedLinkForItem() throws Exception { + public void findTypedLinkForItemWithAuthor() throws Exception { String bitstreamContent = "ThisIsSomeDummyText"; String bitstreamMimeType = "text/plain"; String orcidValue = "orcidValue"; context.turnOffAuthorisationSystem(); - Item author = 
ItemBuilder.createItem(context, collection) - .withMetadata("dspace", "entity", "type", AUTHOR) + Collection personCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType(PERSON_ENTITY_TYPE) + .build(); + + Item author = ItemBuilder.createItem(context, personCollection) + .withPersonIdentifierLastName("familyName") + .withPersonIdentifierFirstName("firstName") .withMetadata(PERSON.getName(), "identifier", "orcid", orcidValue) .build(); - List metadata = itemService.getMetadata(author, "dspace", "entity", - "type", Item.ANY, false); - itemService.removeMetadataValues(context, author, List.of(metadata.get(0))); Item publication = ItemBuilder.createItem(context, collection) .withTitle("Item Test") .withMetadata("dc", "identifier", "doi", doi) @@ -420,7 +404,7 @@ public void findTypedLinkForItem() throws Exception { } EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); - EntityType authorEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, AUTHOR).build(); + EntityType authorEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, PERSON_ENTITY_TYPE).build(); RelationshipType isAuthorOfPublicationRelationshipType = RelationshipTypeBuilder.createRelationshipTypeBuilder(context, publicationEntityType, authorEntityType, "isAuthorOfPublication", "isPublicationOfAuthor", @@ -434,16 +418,21 @@ public void findTypedLinkForItem() throws Exception { context.restoreAuthSystemState(); String url = configurationService.getProperty("dspace.ui.url"); + String dcIdentifierUriMetadataValue = itemService + .getMetadataFirstValue(publication, "dc", "identifier", "uri", Item.ANY); + getClient().perform(get("/signposting/links/" + publication.getID()) .header("Accept", "application/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$", - Matchers.hasSize(4))) + Matchers.hasSize(5))) .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(orcidPattern, orcidValue) + "' " + "&& @.rel == 'author' " + "&& @.type == 'text/html')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(doiPattern, doi) + "' " + "&& @.rel == 'cite-as')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + dcIdentifierUriMetadataValue + "' " + + "&& @.rel == 'cite-as')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + url + "/bitstreams/" + bitstream.getID() + "/download' " + "&& @.rel == 'item' " + "&& @.type == 'text/plain')]").exists()) @@ -569,18 +558,7 @@ public void findTypedLinkForRestrictedBitstream() throws Exception { String uiUrl = configurationService.getProperty("dspace.ui.url"); getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", - Matchers.hasSize(3))) - .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + - "&& @.rel == 'collection' " + - "&& @.type == 'text/html')]").exists()) - .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + - "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset')]").exists()) - .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + - "' && @.rel == 'linkset' " + - "&& @.type == 'application/linkset+json')]").exists()); + .andExpect(status().isUnauthorized()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); @@ -613,18 +591,7 @@ public void 
findTypedLinkForBitstreamUnderEmbargo() throws Exception { String uiUrl = configurationService.getProperty("dspace.ui.url"); getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", - Matchers.hasSize(3))) - .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + - "&& @.rel == 'collection' " + - "&& @.type == 'text/html')]").exists()) - .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + - "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset')]").exists()) - .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + - "' && @.rel == 'linkset' " + - "&& @.type == 'application/linkset+json')]").exists()); + .andExpect(status().isUnauthorized()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); @@ -656,18 +623,7 @@ public void findTypedLinkForBitstreamOfWorkspaceItem() throws Exception { String uiUrl = configurationService.getProperty("dspace.ui.url"); getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", - Matchers.hasSize(3))) - .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/items/" + item.getID() + "' " + - "&& @.rel == 'collection' " + - "&& @.type == 'text/html')]").exists()) - .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + - "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset')]").exists()) - .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + - "' && @.rel == 'linkset' " + - "&& @.type == 'application/linkset+json')]").exists()); + .andExpect(status().isUnauthorized()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); diff --git a/dspace/config/spring/rest/signposting.xml b/dspace/config/spring/rest/signposting.xml index ed0c8912eb00..c8bc63c6f0dd 100644 --- a/dspace/config/spring/rest/signposting.xml +++ b/dspace/config/spring/rest/signposting.xml @@ -1,6 +1,7 @@ + xmlns:util="http://www.springframework.org/schema/util" + xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/util https://www.springframework.org/schema/util/spring-util.xsd"> @@ -10,31 +11,44 @@ - - + + + This metadata field must be used on Person entity. 
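    <!--
        A minimal sketch, assuming the wiring style of this file, of how the MetadataConfiguration
        model added in this patch can be used: ItemIdentifierProcessor exposes
        setMetadataConfigurations(List), and each entry pairs a metadata field with an optional
        MessageFormat pattern, for example

            <bean class="org.dspace.app.rest.signposting.model.MetadataConfiguration">
                <property name="metadataField" value="dc.identifier.doi"/>
                <property name="pattern" value="https://doi.org/{0}"/>
            </bean>

        The property names come from the MetadataConfiguration class above; the concrete fields and
        patterns configured in the shipped signposting.xml may differ.
    -->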
+ - - - + + - - + + + + + + + + + + - + - + - - - From f7ff85bbd55f6f7897c76b61782e4017d326de07 Mon Sep 17 00:00:00 2001 From: Mohamed Saber Eskander Date: Wed, 7 Jun 2023 12:53:19 +0300 Subject: [PATCH 280/686] solved issue of 'could not initialize proxy - no Session' --- .../bulkaccesscontrol/BulkAccessControl.java | 9 +- .../BulkAccessControlIT.java | 110 ++++++++++++++++++ 2 files changed, 118 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index c85ebf65f214..50e1022dbe37 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -184,7 +184,6 @@ public void internalRun() throws Exception { updateItemsAndBitstreamsPolices(accessControl); context.complete(); } catch (Exception e) { - e.printStackTrace(); handler.handleException(e); context.abort(); } @@ -481,6 +480,14 @@ private void setItemPolicies(Item item, BulkAccessControlInput accessControl) private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessControl) { AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints(); + // look over all the bundles and force initialization of bitstreams collection + // to avoid lazy initialization exception + long count = item.getBundles() + .stream() + .flatMap(bundle -> + bundle.getBitstreams().stream()) + .count(); + item.getBundles(CONTENT_BUNDLE_NAME).stream() .flatMap(bundle -> bundle.getBitstreams().stream()) .filter(bitstream -> constraints == null || diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java index 02fe4a0bb597..73f02e40494c 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -1709,6 +1709,116 @@ public void performBulkAccessWithReplaceModeAndAppendModeIsEnabledTest() throws } } + @Test + public void performBulkAccessWithReplaceModeOnItemsWithMultipleBundlesTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group adminGroup = groupService.findByName(context, Group.ADMIN); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection one") + .build(); + + Item itemOne = ItemBuilder.createItem(context, collection).build(); + Item itemTwo = ItemBuilder.createItem(context, collection).build(); + ItemBuilder.createItem(context, collection).build(); + + Bundle bundleOne = BundleBuilder.createBundle(context, itemOne) + .withName("ORIGINAL") + .build(); + + Bundle bundleTwo = BundleBuilder.createBundle(context, itemTwo) + .withName("ORIGINAL") + .build(); + + BundleBuilder.createBundle(context, itemTwo) + .withName("ORIGINAL") + .build(); + + BundleBuilder.createBundle(context, itemOne) + .withName("TEXT") + .build(); + + Bitstream bitstreamOne; + Bitstream bitstreamTwo; + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, 
CharEncoding.UTF_8)) { + bitstreamOne = + BitstreamBuilder.createBitstream(context, bundleOne, is) + .withName("bistream of bundle one") + .build(); + } + + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstreamTwo = + BitstreamBuilder.createBitstream(context, bundleTwo, is) + .withName("bitstream of bundle two") + .build(); + } + + context.restoreAuthSystemState(); + + String jsonOne = "{\n" + + " \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": []\n" + + " },\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"administrator\",\n" + + " \"startDate\": null,\n" + + " \"endDate\": null\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunity.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(2)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), containsInAnyOrder( + containsString("Replacing Bitstream {" + bitstreamOne.getID() + + "} policy to access conditions:{administrator}"), + containsString("Replacing Bitstream {" + bitstreamTwo.getID() + + "} policy to access conditions:{administrator}") + )); + + bitstreamOne = context.reloadEntity(bitstreamOne); + bitstreamTwo = context.reloadEntity(bitstreamTwo); + + assertThat(bitstreamOne.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamOne.getResourcePolicies(), hasItem( + matches(READ, adminGroup, "administrator", TYPE_CUSTOM) + )); + + assertThat(bitstreamTwo.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamTwo.getResourcePolicies(), hasItem( + matches(READ, adminGroup, "administrator", TYPE_CUSTOM) + )); + } + @Test public void performBulkAccessWithHelpParamTest() throws Exception { From f8e3dad2349941aff8e68bbfa048159639e42039 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Thu, 8 Jun 2023 00:08:45 +0200 Subject: [PATCH 281/686] [CST-5728] LYRASIS [Donated]: DSpace7: Implement signposting pattern in DSpace (REST) --- .../controller/LinksetRestController.java | 20 +++++++-- .../controller/LinksetRestControllerIT.java | 44 +++++++++++++++++++ 2 files changed, 60 insertions(+), 4 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java index 7a1271454302..fa6461a76f95 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -7,6 +7,7 @@ */ package org.dspace.app.rest.signposting.controller; +import static java.lang.String.format; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID; import java.sql.SQLException; @@ -38,6 +39,7 @@ import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; +import 
org.springframework.security.access.AccessDeniedException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; @@ -83,17 +85,18 @@ public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) try { Context context = ContextUtil.obtainContext(request); - Item dso = itemService.find(context, uuid); - if (dso == null) { + Item item = itemService.find(context, uuid); + if (item == null) { throw new ResourceNotFoundException("No such Item: " + uuid); } + verifyItemIsDiscoverable(item); List linksetNodes = new ArrayList<>(); - if (dso.getType() == Constants.ITEM) { + if (item.getType() == Constants.ITEM) { List ispp = new DSpace().getServiceManager() .getServicesByType(ItemSignpostingProcessor.class); for (ItemSignpostingProcessor sp : ispp) { - sp.addLinkSetNodes(context, request, dso, linksetNodes); + sp.addLinkSetNodes(context, request, item, linksetNodes); } } return converter.toRest(LinksetMapper.map(linksetNodes), utils.obtainProjection()); @@ -113,6 +116,7 @@ public LinksetRest getLset(HttpServletRequest request, @PathVariable UUID uuid) if (item == null) { throw new ResourceNotFoundException("No such Item: " + uuid); } + verifyItemIsDiscoverable(item); List linksetNodes = new ArrayList<>(); List ispp = new DSpace().getServiceManager() @@ -151,6 +155,7 @@ public List getHeader(HttpServletRequest request, @PathVariable U List linksetNodes = new ArrayList<>(); if (dso.getType() == Constants.ITEM) { + verifyItemIsDiscoverable((Item) dso); List ispp = new DSpace().getServiceManager() .getServicesByType(ItemSignpostingProcessor.class); for (ItemSignpostingProcessor sp : ispp) { @@ -171,4 +176,11 @@ public List getHeader(HttpServletRequest request, @PathVariable U throw new RuntimeException(e); } } + + private static void verifyItemIsDiscoverable(Item item) { + if (!item.isDiscoverable()) { + String message = format("Item with uuid [%s] is not Discoverable", item.getID().toString()); + throw new AccessDeniedException(message); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index 180ad3672170..b976e40bf27e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -294,6 +294,20 @@ public void findOneRestrictedItemJsonLinksets() throws Exception { .andExpect(status().isUnauthorized()); } + @Test + public void findOneUnDiscoverableItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + @Test public void findOneBitstreamJsonLinksets() throws Exception { String bitstreamContent = "ThisIsSomeDummyText"; @@ -371,6 +385,19 @@ public void findOneItemLsetLinksets() throws Exception { .andExpect(content().string(Matchers.containsString(typeRelation))); } + @Test + public void findOneUnDiscoverableItemLsetLinksets() throws Exception { + 
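        // Same guard as the JSON variant above, exercised through the "application/linkset"
        // representation: LinksetRestController.verifyItemIsDiscoverable() throws
        // AccessDeniedException for an item flagged as not discoverable, which this anonymous
        // request observes as 401 Unauthorized.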
context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID()) + .header("Accept", "application/linkset")) + .andExpect(status().isUnauthorized()); + } + @Test public void findTypedLinkForItemWithAuthor() throws Exception { String bitstreamContent = "ThisIsSomeDummyText"; @@ -630,4 +657,21 @@ public void findTypedLinkForBitstreamOfWorkspaceItem() throws Exception { choiceAuthorityService.clearCache(); } + @Test + public void findTypedLinkForUnDiscoverableItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/links/" + item.getID()) + .header("Accept", "application/json")) + .andExpect(status().isUnauthorized()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + } From 39df261411729e9e6ac49b2c82dacb5a6174ccb0 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Thu, 8 Jun 2023 11:49:30 +0200 Subject: [PATCH 282/686] [CST-5728] LYRASIS [Donated]: DSpace7: Implement signposting pattern in DSpace (REST) --- .../rest/signposting/controller/LinksetRestController.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java index fa6461a76f95..d7718dce0139 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -139,6 +139,10 @@ public LinksetRest getLset(HttpServletRequest request, @PathVariable UUID uuid) } } + // In @PreAuthorize(...) we're using "&&" (and) instead of "||" (or) because if hasPermission() is unable + // to find object of specified type with specified uuid it returns "true". + // For example: if we pass uuid of Bitstream: hasPermission(#uuid, 'ITEM', 'READ') returns "true", because + // it will use ItemService with uuid of bitstream. 
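    // Spelled out for a bitstream uuid, the two checks evaluate as:
    //   hasPermission(uuid, 'ITEM', 'READ')      -> true (no Item with that uuid, nothing to deny)
    //   hasPermission(uuid, 'BITSTREAM', 'READ') -> the real READ decision for the bitstream
    // so "&&" keeps the meaningful check in play, whereas "||" would short-circuit on the first
    // "true" and grant access without ever consulting the bitstream permission.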
@PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ') && hasPermission(#uuid, 'BITSTREAM', 'READ')") @RequestMapping(value = "/links" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) public List getHeader(HttpServletRequest request, @PathVariable UUID uuid) { From 7757c4e898373fc06b03e070eca67bd58d2ad4cf Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Thu, 8 Jun 2023 13:50:11 +0200 Subject: [PATCH 283/686] [DURACOM-153] fix validation to use the retrieved zip file on saf import --- .../src/main/java/org/dspace/app/itemimport/ItemImport.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index c94e163243c1..b32de11f7a7f 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -332,17 +332,19 @@ protected void process(Context context, ItemImportService itemImportService, */ protected void readZip(Context context, ItemImportService itemImportService) throws Exception { Optional optionalFileStream = Optional.empty(); + Optional validationFileStream = Optional.empty(); if (!remoteUrl) { // manage zip via upload optionalFileStream = handler.getFileStream(context, zipfilename); + validationFileStream = handler.getFileStream(context, zipfilename); } else { // manage zip via remote url optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); } - if (optionalFileStream.isPresent()) { + if (validationFileStream.isPresent()) { // validate zip file - Optional validationFileStream = handler.getFileStream(context, zipfilename); if (validationFileStream.isPresent()) { validateZip(validationFileStream.get()); } From a00dd83a0c9a3272ff3004c40bfcf03691664296 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Thu, 8 Jun 2023 20:47:27 +0200 Subject: [PATCH 284/686] 102124: Avoid null pointer when checking disposition --- .../java/org/dspace/app/rest/BitstreamRestController.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java index a81d62b60148..b4de375fe882 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java @@ -208,6 +208,10 @@ private boolean isNotAnErrorResponse(HttpServletResponse response) { } private boolean checkFormatForContentDisposition(BitstreamFormat format) { + // never automatically download undefined formats + if (format == null) { + return false; + } List formats = List.of((configurationService.getArrayProperty("webui.content_disposition_format"))); boolean download = formats.contains(format.getMIMEType()); if (!download) { From 1b06c644548c6d34154a2dfd5162903c56539a7f Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Thu, 8 Jun 2023 21:49:13 +0200 Subject: [PATCH 285/686] 102052: Remove getPrimaryBitstream from PrimaryBitstreamController --- .../app/rest/PrimaryBitstreamController.java | 25 ------------------- .../BundlePrimaryBitstreamLinkRepository.java | 4 +++ 2 files changed, 4 insertions(+), 25 deletions(-) diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java index 59ab1bba763a..e0357b8a4117 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java @@ -16,7 +16,6 @@ import org.dspace.app.rest.converter.ConverterService; import org.dspace.app.rest.exception.UnprocessableEntityException; -import org.dspace.app.rest.model.BitstreamRest; import org.dspace.app.rest.model.BundleRest; import org.dspace.app.rest.model.hateoas.BundleResource; import org.dspace.app.rest.repository.BundlePrimaryBitstreamLinkRepository; @@ -53,30 +52,6 @@ public class PrimaryBitstreamController { @Autowired private Utils utils; - /** - * This method retrieves a primaryBitstream on the given Bundle. - * Returns null if Bundle doesn't have a primaryBitstream. - *
- * curl -X GET "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" - * - * - * @param uuid The UUID of the Bundle of which the primaryBitstream will be retrieved - * @param request The HttpServletRequest - * @return The primaryBitstream, or null if not found - */ - @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'READ')") - @RequestMapping(method = RequestMethod.GET) - public ResponseEntity> getPrimaryBitstream(@PathVariable UUID uuid, - HttpServletRequest request) { - BitstreamRest bitstreamRest = repository.getPrimaryBitstream(null, uuid, null, utils.obtainProjection()); - if (bitstreamRest == null) { - return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); - } else { - return ControllerUtils.toResponseEntity(HttpStatus.OK, new HttpHeaders(), - (RepresentationModel) converter.toResource(bitstreamRest)); - } - } - /** * This method creates a primaryBitstream on the given Bundle. *
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java index a013aa0dd002..607751453e7f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java @@ -39,6 +39,10 @@ public class BundlePrimaryBitstreamLinkRepository extends AbstractDSpaceRestRepo /** * Retrieves the primaryBitstream of a Bundle. + * Returns null if Bundle doesn't have a primaryBitstream. + *
+ * curl -X GET "http://{dspace.server.url}/api/core/bundles/{bundle-uuid}/primaryBitstream" + * * * @param request The HttpServletRequest if relevant * @param bundleId The UUID of the Bundle From c5028f7f1febab43f65528a200bb3381ea5d0542 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Thu, 8 Jun 2023 23:08:48 +0200 Subject: [PATCH 286/686] 102052: Small improvements --- .../app/rest/PrimaryBitstreamController.java | 3 +- .../BundlePrimaryBitstreamLinkRepository.java | 8 ++- .../rest/PrimaryBitstreamControllerIT.java | 64 +++++++++---------- 3 files changed, 40 insertions(+), 35 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java index e0357b8a4117..c236954dab48 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java @@ -128,8 +128,7 @@ public ResponseEntity> deletePrimaryBitstream(@PathVariab private Bitstream getBitstreamFromRequest(Context context, HttpServletRequest request) { List dsoList = utils.constructDSpaceObjectList(context, utils.getStringListFromRequest(request)); if (dsoList.size() != 1 || dsoList.get(0).getType() != BITSTREAM) { - throw new UnprocessableEntityException("No bitstream has been specified " + - "or the data cannot be resolved to a bitstream."); + throw new UnprocessableEntityException("URI does not resolve to an existing bitstream."); } return (Bitstream) dsoList.get(0); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java index 607751453e7f..3d11379cd328 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundlePrimaryBitstreamLinkRepository.java @@ -131,8 +131,14 @@ public void deletePrimaryBitstream(Context context, UUID bundleId) { * @param context The current DSpace context * @param bundleId The UUID of the Bundle * @param bitstream The Bitstream to use as primaryBitstream - * @param shouldBeSet Whether a primaryBitstream is should already be set + * @param shouldBeSet Whether a primaryBitstream should already be set: + * primaryBitstream should be present before updating or deleting, + * it should be null before adding * @return The Bundle + * @throws ResourceNotFoundException if the bundle is not found + * @throws DSpaceBadRequestException if primaryBitstream exists during an POST, + * if primaryBitstream is null during an UPDATE or DELETE + * @throws UnprocessableEntityException if the bundle does not contain the bitstream */ private Bundle setPrimaryBitstream(Context context, UUID bundleId, Bitstream bitstream, boolean shouldBeSet) throws SQLException { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java index d8bc6c8cf98a..ba1039bce631 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java @@ -82,7 +82,7 @@ public void testGetPrimaryBitstream() throws Exception { 
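        // Only the test helper is renamed in this hunk (getBundleUrl -> getBundlePrimaryBitstreamUrl);
        // the endpoint stays /api/core/bundles/{uuid}/primaryBitstream, with GET now handled by
        // BundlePrimaryBitstreamLinkRepository rather than PrimaryBitstreamController.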
bundle.setPrimaryBitstreamID(bitstream); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get(getBundleUrl(bundle.getID()))) + getClient(token).perform(get(getBundlePrimaryBitstreamUrl(bundle.getID()))) .andExpect(status().isOk()) .andExpect(jsonPath("$", BitstreamMatcher.matchProperties(bitstream))); } @@ -90,14 +90,14 @@ public void testGetPrimaryBitstream() throws Exception { @Test public void testGetPrimaryBitstreamBundleNotFound() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get(getBundleUrl(UUID.randomUUID()))) + getClient(token).perform(get(getBundlePrimaryBitstreamUrl(UUID.randomUUID()))) .andExpect(status().isNotFound()); } @Test public void testGetPrimaryBitstreamNonExisting() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get(getBundleUrl(bundle.getID()))) + getClient(token).perform(get(getBundlePrimaryBitstreamUrl(bundle.getID()))) .andExpect(status().isNoContent()) .andExpect(jsonPath("$").doesNotExist()); } @@ -105,7 +105,7 @@ public void testGetPrimaryBitstreamNonExisting() throws Exception { @Test public void testPostPrimaryBitstream() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(post(getBundleUrl(bundle.getID())) + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream.getID()))) .andExpect(status().isCreated()) @@ -119,7 +119,7 @@ public void testPostPrimaryBitstream() throws Exception { @Test public void testPostPrimaryBitstreamBundleNotFound() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(post(getBundleUrl(UUID.randomUUID())) + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(UUID.randomUUID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream.getID()))) .andExpect(status().isNotFound()); @@ -131,7 +131,7 @@ public void testPostPrimaryBitstreamBundleNotFound() throws Exception { @Test public void testPostPrimaryBitstreamInvalidBitstream() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(post(getBundleUrl(bundle.getID())) + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(UUID.randomUUID()))) .andExpect(status().isUnprocessableEntity()); @@ -148,7 +148,7 @@ public void testPostPrimaryBitstreamAlreadyExists() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(post(getBundleUrl(bundle.getID())) + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream2.getID()))) .andExpect(status().isBadRequest()); @@ -165,7 +165,7 @@ public void testPostPrimaryBitstreamNotInBundle() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(post(getBundleUrl(bundle.getID())) + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream2.getID()))) .andExpect(status().isUnprocessableEntity()); @@ -186,7 +186,7 @@ public void testPostPrimaryBitstreamCommunityAdmin() throws Exception { context.restoreAuthSystemState(); String token = 
getAuthToken(eperson.getEmail(), password); - getClient(token).perform(post(getBundleUrl(bundle2.getID())) + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle2.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream2.getID()))) .andExpect(status().isCreated()) @@ -208,7 +208,7 @@ public void testPostPrimaryBitstreamCollectionAdmin() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(post(getBundleUrl(bundle2.getID())) + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle2.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream2.getID()))) .andExpect(status().isCreated()) @@ -229,7 +229,7 @@ public void testPostPrimaryBitstreamItemAdmin() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(post(getBundleUrl(bundle2.getID())) + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle2.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream2.getID()))) .andExpect(status().isCreated()) @@ -244,7 +244,7 @@ public void testPostPrimaryBitstreamItemAdmin() throws Exception { public void testPostPrimaryBitstreamForbidden() throws Exception { String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(post(getBundleUrl(bundle.getID())) + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream.getID()))) .andExpect(status().isForbidden()); @@ -252,7 +252,7 @@ public void testPostPrimaryBitstreamForbidden() throws Exception { @Test public void testPostPrimaryBitstreamUnauthenticated() throws Exception { - getClient().perform(post(getBundleUrl(bundle.getID())) + getClient().perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream.getID()))) .andExpect(status().isUnauthorized()); @@ -266,7 +266,7 @@ public void testUpdatePrimaryBitstream() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(put(getBundleUrl(bundle.getID())) + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream2.getID()))) .andExpect(status().isOk()) @@ -280,7 +280,7 @@ public void testUpdatePrimaryBitstream() throws Exception { @Test public void testUpdatePrimaryBitstreamBundleNotFound() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(put(getBundleUrl(UUID.randomUUID())) + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(UUID.randomUUID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream.getID()))) .andExpect(status().isNotFound()); @@ -291,7 +291,7 @@ public void testUpdatePrimaryBitstreamInvalidBitstream() throws Exception { bundle.setPrimaryBitstreamID(bitstream); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(put(getBundleUrl(bundle.getID())) + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(UUID.randomUUID()))) .andExpect(status().isUnprocessableEntity()); @@ -303,7 +303,7 @@ public void testUpdatePrimaryBitstreamInvalidBitstream() throws Exception { @Test public void 
testUpdatePrimaryBitstreamNonExisting() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(put(getBundleUrl(bundle.getID())) + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream.getID()))) .andExpect(status().isBadRequest()); @@ -321,7 +321,7 @@ public void testUpdatePrimaryBitstreamNotInBundle() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(put(getBundleUrl(bundle.getID())) + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream2.getID()))) .andExpect(status().isUnprocessableEntity()); @@ -344,7 +344,7 @@ public void testUpdatePrimaryBitstreamCommunityAdmin() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(put(getBundleUrl(bundle2.getID())) + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle2.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream3.getID()))) .andExpect(status().isOk()) @@ -368,7 +368,7 @@ public void testUpdatePrimaryBitstreamCollectionAdmin() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(put(getBundleUrl(bundle2.getID())) + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle2.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream3.getID()))) .andExpect(status().isOk()) @@ -391,7 +391,7 @@ public void testUpdatePrimaryBitstreamItemAdmin() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(put(getBundleUrl(bundle2.getID())) + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle2.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream3.getID()))) .andExpect(status().isOk()) @@ -410,7 +410,7 @@ public void testUpdatePrimaryBitstreamForbidden() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(put(getBundleUrl(bundle.getID())) + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream2.getID()))) .andExpect(status().isForbidden()); @@ -423,7 +423,7 @@ public void testUpdatePrimaryBitstreamUnauthenticated() throws Exception { Bitstream bitstream2 = createBitstream(bundle); context.restoreAuthSystemState(); - getClient().perform(put(getBundleUrl(bundle.getID())) + getClient().perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) .contentType(textUriContentType) .content(getBitstreamUrl(bitstream2.getID()))) .andExpect(status().isUnauthorized()); @@ -434,7 +434,7 @@ public void testDeletePrimaryBitstream() throws Exception { bundle.setPrimaryBitstreamID(bitstream); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(delete(getBundleUrl(bundle.getID()))) + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) .andExpect(status().isNoContent()); // verify primaryBitstream was actually deleted bundle = context.reloadEntity(bundle); @@ -447,14 +447,14 @@ public void testDeletePrimaryBitstream() throws Exception { @Test public void 
testDeletePrimaryBitstreamBundleNotFound() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(delete(getBundleUrl(UUID.randomUUID()))) + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(UUID.randomUUID()))) .andExpect(status().isNotFound()); } @Test public void testDeletePrimaryBitstreamBundleNonExisting() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(delete(getBundleUrl(bundle.getID()))) + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) .andExpect(status().isBadRequest()); // verify primaryBitstream is still null bundle = context.reloadEntity(bundle); @@ -474,7 +474,7 @@ public void testDeletePrimaryBitstreamCommunityAdmin() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(delete(getBundleUrl(bundle2.getID()))) + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle2.getID()))) .andExpect(status().isNoContent()); // verify primaryBitstream was actually deleted bundle2 = context.reloadEntity(bundle2); @@ -496,7 +496,7 @@ public void testDeletePrimaryBitstreamCollectionAdmin() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(delete(getBundleUrl(bundle2.getID()))) + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle2.getID()))) .andExpect(status().isNoContent()); // verify primaryBitstream was actually deleted bundle2 = context.reloadEntity(bundle2); @@ -517,7 +517,7 @@ public void testDeletePrimaryBitstreamItemAdmin() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(delete(getBundleUrl(bundle2.getID()))) + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle2.getID()))) .andExpect(status().isNoContent()); // verify primaryBitstream was actually deleted bundle2 = context.reloadEntity(bundle2); @@ -532,7 +532,7 @@ public void testDeletePrimaryBitstreamForbidden() throws Exception { bundle.setPrimaryBitstreamID(bitstream); String token = getAuthToken(eperson.getEmail(), password); - getClient(token).perform(delete(getBundleUrl(bundle.getID()))) + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) .andExpect(status().isForbidden()); } @@ -540,11 +540,11 @@ public void testDeletePrimaryBitstreamForbidden() throws Exception { public void testDeletePrimaryBitstreamUnauthenticated() throws Exception { bundle.setPrimaryBitstreamID(bitstream); - getClient().perform(delete(getBundleUrl(bundle.getID()))) + getClient().perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) .andExpect(status().isUnauthorized()); } - private String getBundleUrl(UUID uuid) { + private String getBundlePrimaryBitstreamUrl(UUID uuid) { return "/api/core/bundles/" + uuid + "/primaryBitstream"; } From 1b1830f037453ec5e58225cc23d8ec9767b8526d Mon Sep 17 00:00:00 2001 From: Yury Bondarenko Date: Fri, 9 Jun 2023 13:46:28 +0200 Subject: [PATCH 287/686] Remove unnecessary DescribeStep change As far as I can tell this never did anything - `relationship.*` is not a valid MDF, so it's unclear what it was supposed to do - everything keeps working if I leave this part out --- .../app/rest/submit/step/DescribeStep.java | 25 ------------------- 1 file changed, 25 deletions(-) diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java index fb4fc5bb9449..ee81d14ca7a6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java @@ -7,9 +7,6 @@ */ package org.dspace.app.rest.submit.step; -import static org.apache.commons.collections4.CollectionUtils.isEmpty; -import static org.apache.commons.lang3.StringUtils.isBlank; - import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; @@ -31,10 +28,8 @@ import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.SubmissionStepConfig; import org.dspace.content.InProgressSubmission; -import org.dspace.content.Item; import org.dspace.content.MetadataValue; import org.dspace.content.RelationshipMetadataService; -import org.dspace.content.RelationshipMetadataValue; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Context; import org.dspace.core.Utils; @@ -147,26 +142,6 @@ private void readField(InProgressSubmission obj, SubmissionStepConfig config, Da } } } - - if (input.isRelationshipField() && isBlank(input.getFieldName())) { - Item item = obj.getItem(); - String key = "relationship." + input.getRelationshipType(); - if (isEmpty(data.getMetadata().get(key))) { - data.getMetadata().put(key, new ArrayList<>()); - } - for (RelationshipMetadataValue metadataValue : - relationshipMetadataService.getRelationshipMetadata(item, true)) { - if (metadataValue.getMetadataField().getElement().equals(input.getRelationshipType())) { - MetadataValueRest dto = new MetadataValueRest(); - dto.setAuthority(metadataValue.getAuthority()); - dto.setConfidence(metadataValue.getConfidence()); - dto.setLanguage(metadataValue.getLanguage()); - dto.setPlace(metadataValue.getPlace()); - dto.setValue(metadataValue.getValue()); - data.getMetadata().get(key).add(dto); - } - } - } } } } From 1ab04518a31a0a9e8916a61688954a33206134cc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 9 Jun 2023 14:42:46 +0000 Subject: [PATCH 288/686] Bump scala-library from 2.13.2 to 2.13.9 in /dspace-api Bumps [scala-library](https://github.com/scala/scala) from 2.13.2 to 2.13.9. - [Release notes](https://github.com/scala/scala/releases) - [Commits](https://github.com/scala/scala/compare/v2.13.2...v2.13.9) --- updated-dependencies: - dependency-name: org.scala-lang:scala-library dependency-type: direct:development ... Signed-off-by: dependabot[bot] --- dspace-api/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index 8f20f423aab5..d41c51638c3a 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -917,7 +917,7 @@ org.scala-lang scala-library - 2.13.2 + 2.13.9 test
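For reference, the dependency this bump touches sits in dspace-api/pom.xml as a standard Maven dependency block. The sketch below assumes the usual element names (groupId, artifactId, version, scope); the values are those shown in the hunk above, with only the version literal changing from 2.13.2 to 2.13.9:

    <!-- sketch: element names assumed, values taken from the hunk above -->
    <dependency>
        <groupId>org.scala-lang</groupId>
        <artifactId>scala-library</artifactId>
        <version>2.13.9</version>
        <scope>test</scope>
    </dependency>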
From ff504e35d1a5b721c47a3ddde7e4e909630631bd Mon Sep 17 00:00:00 2001 From: damian Date: Fri, 9 Jun 2023 16:56:26 +0200 Subject: [PATCH 289/686] Variable naming change. Javadocs added. --- .../main/java/org/dspace/browse/ItemCounter.java | 13 +++++++++++++ .../main/java/org/dspace/content/Collection.java | 5 +++++ .../org/dspace/content/CollectionServiceImpl.java | 7 +++++++ .../src/main/java/org/dspace/content/Community.java | 5 +++++ .../org/dspace/content/CommunityServiceImpl.java | 7 +++++++ .../dspace/content/service/CollectionService.java | 8 ++++++++ .../dspace/content/service/CommunityService.java | 7 +++++++ .../app/rest/converter/CollectionConverter.java | 3 ++- .../app/rest/converter/CommunityConverter.java | 2 +- .../org/dspace/app/rest/model/CollectionRest.java | 10 +++++----- .../org/dspace/app/rest/model/CommunityRest.java | 10 +++++----- 11 files changed, 65 insertions(+), 12 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java index da88b0657ff8..1718e07ab2ca 100644 --- a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java +++ b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java @@ -50,6 +50,12 @@ public class ItemCounter { */ private Context context; + /** + * This field is used to hold the singular instance of this class. + * Singleton pattern is used but this class should be + * refactored to the modern DSpace approach (injectable service). + */ + private static ItemCounter instance; protected ItemService itemService; @@ -73,6 +79,13 @@ public ItemCounter(Context context) throws ItemCountException { this.useCache = configurationService.getBooleanProperty("webui.strengths.cache", true); } + /** + * Get the singular instance of this class. + * It creates a new instance at the first usage of this method.
+ * + * @return instance af a class + * @throws ItemCountException when error occurs + */ public static ItemCounter getInstance() throws ItemCountException { if (instance == null) { instance = new ItemCounter(ContextUtil.obtainCurrentRequestContext()); diff --git a/dspace-api/src/main/java/org/dspace/content/Collection.java b/dspace-api/src/main/java/org/dspace/content/Collection.java index 6ee61abf5720..6caae7f47b24 100644 --- a/dspace-api/src/main/java/org/dspace/content/Collection.java +++ b/dspace-api/src/main/java/org/dspace/content/Collection.java @@ -337,6 +337,11 @@ private CollectionService getCollectionService() { return collectionService; } + /** + * return count of the collection items + * + * @return int + */ public int countArchivedItem() { try { return collectionService.countArchivedItem(this); diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index 0c5ac94e1c9f..a5fcf0c1aa37 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -1069,6 +1069,13 @@ public List findAllCollectionsByEntityType(Context context, String e return collectionList; } + /** + * Returns total collection archived items + * + * @param collection Collection + * @return total collection archived items + * @throws ItemCountException + */ @Override public int countArchivedItem(Collection collection) throws ItemCountException { return ItemCounter.getInstance().getCount(collection); diff --git a/dspace-api/src/main/java/org/dspace/content/Community.java b/dspace-api/src/main/java/org/dspace/content/Community.java index d0477d116f3a..39705369e3d6 100644 --- a/dspace-api/src/main/java/org/dspace/content/Community.java +++ b/dspace-api/src/main/java/org/dspace/content/Community.java @@ -265,6 +265,11 @@ private CommunityService getCommunityService() { return communityService; } + /** + * return count of the community items + * + * @return int + */ public int countArchivedItem() { try { return communityService.getArchivedItems(this); diff --git a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java index 496a06fd33a5..f693c62e9247 100644 --- a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java @@ -711,6 +711,13 @@ public int countTotal(Context context) throws SQLException { return communityDAO.countRows(context); } + /** + * Returns total community archived items + * + * @param community Community + * @return total community archived items + * @throws ItemCountException + */ @Override public int getArchivedItems(Community community) throws ItemCountException { return ItemCounter.getInstance().getCount(community); diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index 3a562d4e5b51..d7f47882c085 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -470,5 +470,13 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu */ public List findAllCollectionsByEntityType(Context context, String entityType) throws SearchServiceException; + + /** + * Returns total collection 
archived items + * + * @param collection Collection + * @return total collection archived items + * @throws ItemCountException + */ int countArchivedItem(Collection collection) throws ItemCountException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java index 65454f26ea72..9326da2203e9 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java @@ -294,5 +294,12 @@ public void removeSubcommunity(Context context, Community parentCommunity, Commu int countTotal(Context context) throws SQLException; + /** + * Returns total community archived items + * + * @param community Community + * @return total community archived items + * @throws ItemCountException + */ int getArchivedItems(Community community) throws ItemCountException; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java index 836af81ef016..a5d06b5b3b45 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java @@ -26,9 +26,10 @@ public class CollectionConverter extends DSpaceObjectConverter Date: Fri, 9 Jun 2023 22:23:00 +0200 Subject: [PATCH 290/686] 102052: 403 checks for Admin ITs in PrimaryBitstreamControllerIT --- .../rest/PrimaryBitstreamControllerIT.java | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java index ba1039bce631..b5c67c640fff 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java @@ -195,6 +195,12 @@ public void testPostPrimaryBitstreamCommunityAdmin() throws Exception { // verify primaryBitstream was actually added bundle2 = context.reloadEntity(bundle2); Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + + // verify Community Admin can't set a primaryBitstream outside their own Community + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); } @Test @@ -217,6 +223,12 @@ public void testPostPrimaryBitstreamCollectionAdmin() throws Exception { // verify primaryBitstream was actually added bundle2 = context.reloadEntity(bundle2); Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + + // verify Collection Admin can't set a primaryBitstream outside their own Collection + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); } @Test @@ -238,6 +250,12 @@ public void testPostPrimaryBitstreamItemAdmin() throws Exception { // verify primaryBitstream was actually added bundle2 = context.reloadEntity(bundle2); Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + + // verify Item Admin can't set a primaryBitstream outside their own Item + 
getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); } @Test @@ -353,6 +371,13 @@ public void testUpdatePrimaryBitstreamCommunityAdmin() throws Exception { // verify primaryBitstream was actually updated bundle2 = context.reloadEntity(bundle2); Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Community Admin can't update a primaryBitstream outside their own Community + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); } @Test @@ -377,6 +402,13 @@ public void testUpdatePrimaryBitstreamCollectionAdmin() throws Exception { // verify primaryBitstream was actually updated bundle2 = context.reloadEntity(bundle2); Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Collection Admin can't update a primaryBitstream outside their own Collection + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); } @Test @@ -400,6 +432,13 @@ public void testUpdatePrimaryBitstreamItemAdmin() throws Exception { // verify primaryBitstream was actually updated bundle2 = context.reloadEntity(bundle2); Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Item Admin can't update a primaryBitstream outside their own Item + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); } @Test @@ -482,6 +521,13 @@ public void testDeletePrimaryBitstreamCommunityAdmin() throws Exception { // verify bitstream itself still exists Assert.assertEquals(1, bundle2.getBitstreams().size()); Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Community Admin can't delete a primaryBitstream outside their own Community + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); } @Test @@ -504,6 +550,13 @@ public void testDeletePrimaryBitstreamCollectionAdmin() throws Exception { // verify bitstream itself still exists Assert.assertEquals(1, bundle2.getBitstreams().size()); Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Collection Admin can't delete a primaryBitstream outside their own Collection + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); } @Test @@ -525,6 +578,13 @@ public void testDeletePrimaryBitstreamItemAdmin() throws Exception { // verify bitstream itself still exists Assert.assertEquals(1, bundle2.getBitstreams().size()); Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Item Admin can't delete a primaryBitstream outside their own Item + 
getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); } @Test From af8cf8671bdf99bf6390aa29c1fb1da275b7327a Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Mon, 12 Jun 2023 11:41:33 +0200 Subject: [PATCH 291/686] 102124: Update webui.content_disposition_format comment --- dspace/config/dspace.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index d81346012164..9adf065fac93 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1289,7 +1289,8 @@ webui.content_disposition_threshold = 8388608 #### Content Attachment Disposition Formats #### # # Set which mimetypes, file extensions will NOT be opened inline -# Files with these mimetypes/extensions will always be downloaded +# Files with these mimetypes/extensions will always be downloaded, +# regardless of the threshold above webui.content_disposition_format = text/richtext #### Multi-file HTML document/site settings ##### From 63e1519c6ecec30efc9222cbc38e6d10d86f2c23 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Tue, 13 Jun 2023 15:19:19 +0200 Subject: [PATCH 292/686] [DURACOM-156] Singposting: feedback after initial deployment --- .../controller/LinksetRestController.java | 8 +-- .../bitstream/BitstreamTypeProcessor.java | 4 +- .../processor/item/ItemAuthorProcessor.java | 4 +- .../processor/item/ItemLicenseProcessor.java | 2 +- .../processor/item/ItemTypeProcessor.java | 4 +- .../controller/LinksetRestControllerIT.java | 58 +++++-------------- 6 files changed, 19 insertions(+), 61 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java index d7718dce0139..8089c518fff0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -72,15 +72,9 @@ public ResponseEntity getAll() { return ResponseEntity.status(HttpStatus.METHOD_NOT_ALLOWED).build(); } - @PreAuthorize("permitAll()") - @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) - public ResponseEntity getOne() { - return ResponseEntity.status(HttpStatus.METHOD_NOT_ALLOWED).build(); - } - @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/json", - method = RequestMethod.GET, produces = "application/linkset+json") + method = RequestMethod.GET) public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) { try { Context context = ContextUtil.obtainContext(request); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java index 2f46193ca81d..005a8009836d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java @@ -48,9 +48,7 @@ public void addLinkSetNodes(Context context, 
HttpServletRequest request, String type = bitstreamService.getMetadataFirstValue(bitstream, "dc", "type", null, Item.ANY); if (StringUtils.isNotBlank(type)) { String typeSchemeUri = mapConverterDSpaceToSchemaOrgUri.getValue(type); - linksetNodes.add( - new LinksetNode(typeSchemeUri, getRelation(), "text/html", buildAnchor(bitstream)) - ); + linksetNodes.add(new LinksetNode(typeSchemeUri, getRelation(), buildAnchor(bitstream))); } } catch (Exception e) { log.error(e.getMessage(), e); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java index e2dc26b82785..1bb215c46864 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java @@ -74,9 +74,7 @@ author, new MetadataFieldName(getOrcidMetadata()), ANY String authorLink = isBlank(getPattern()) ? authorOrcid : MessageFormat.format(getPattern(), authorOrcid); - linksetNodes.add( - new LinksetNode(authorLink, getRelation(), "text/html", buildAnchor(context, item)) - ); + linksetNodes.add(new LinksetNode(authorLink, getRelation(), buildAnchor(context, item))); } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java index 2c356129963e..1a26fa7695b1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java @@ -41,7 +41,7 @@ public void addLinkSetNodes(Context context, HttpServletRequest request, try { String licenseUrl = creativeCommonsService.getLicenseURL(context, item); if (StringUtils.isNotBlank(licenseUrl)) { - linksetNodes.add(new LinksetNode(licenseUrl, getRelation(), "text/html", buildAnchor(context, item))); + linksetNodes.add(new LinksetNode(licenseUrl, getRelation(), buildAnchor(context, item))); } } catch (Exception e) { log.error(e.getMessage(), e); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java index f1f41de055ee..ddd2da12d59a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java @@ -45,12 +45,12 @@ public ItemTypeProcessor(FrontendUrlService frontendUrlService) { public void addLinkSetNodes(Context context, HttpServletRequest request, Item item, List linksetNodes) { try { - linksetNodes.add(new LinksetNode(ABOUT_PAGE_URI, getRelation(), "text/html", buildAnchor(context, item))); + linksetNodes.add(new LinksetNode(ABOUT_PAGE_URI, getRelation(), buildAnchor(context, item))); String type = itemService.getMetadataFirstValue(item, "dc", "type", null, Item.ANY); if (StringUtils.isNotBlank(type)) { String typeSchemeUri = mapConverterDSpaceToSchemaOrgUri.getValue(type); linksetNodes.add( - new LinksetNode(typeSchemeUri, getRelation(), "text/html", buildAnchor(context, item)) + new 
LinksetNode(typeSchemeUri, getRelation(), buildAnchor(context, item)) ); } } catch (Exception e) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index b976e40bf27e..7375ee3322a6 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -101,19 +101,6 @@ public void findAllItemsLinksets() throws Exception { .andExpect(status().isMethodNotAllowed()); } - @Test - public void findOneItemGenericLinksets() throws Exception { - context.turnOffAuthorisationSystem(); - Item item = ItemBuilder.createItem(context, collection) - .withTitle("Item Test") - .withMetadata("dc", "identifier", "doi", doi) - .build(); - context.restoreAuthSystemState(); - - getClient().perform(get("/signposting/linksets/" + item.getID())) - .andExpect(status().isMethodNotAllowed()); - } - @Test public void findOneItemJsonLinksets() throws Exception { context.turnOffAuthorisationSystem(); @@ -152,12 +139,8 @@ public void findOneItemJsonLinksetsWithType() throws Exception { Matchers.hasSize(2))) .andExpect(jsonPath("$.linkset[0].type[0].href", Matchers.hasToString("https://schema.org/AboutPage"))) - .andExpect(jsonPath("$.linkset[0].type[0].type", - Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[0].type[1].href", - Matchers.hasToString(articleUri))) - .andExpect(jsonPath("$.linkset[0].type[1].type", - Matchers.hasToString("text/html"))); + Matchers.hasToString(articleUri))); } @Test @@ -176,13 +159,8 @@ public void findOneItemJsonLinksetsWithLicence() throws Exception { Matchers.hasSize(1))) .andExpect(jsonPath("$.linkset[0].type[0].href", Matchers.hasToString("https://schema.org/AboutPage"))) - .andExpect(jsonPath("$.linkset[0].type[0].type", - Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[0].license[0].href", - Matchers.hasToString(licenceUrl))) - .andExpect(jsonPath("$.linkset[0].license[0].type", - Matchers.hasToString("text/html"))); - + Matchers.hasToString(licenceUrl))); } @Test @@ -216,8 +194,7 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { context.restoreAuthSystemState(); String url = configurationService.getProperty("dspace.ui.url"); - getClient().perform(get("/signposting/linksets/" + item.getID() + "/json") - .header("Accept", "application/linkset+json")) + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", Matchers.hasSize(1))) @@ -329,22 +306,19 @@ public void findOneBitstreamJsonLinksets() throws Exception { } context.restoreAuthSystemState(); - getClient().perform(get("/signposting/linksets/" + bitstream.getID() + "/json") - .header("Accept", "application/linkset+json")) + getClient().perform(get("/signposting/linksets/" + bitstream.getID() + "/json")) .andExpect(status().isNotFound()); } @Test public void findOneCollectionJsonLinksets() throws Exception { - getClient().perform(get("/signposting/linksets/" + collection.getID() + "/json") - .header("Accept", "application/linkset+json")) + getClient().perform(get("/signposting/linksets/" + collection.getID() + "/json")) .andExpect(status().isNotFound()); } @Test public void findOneCommunityJsonLinksets() throws Exception { - 
getClient().perform(get("/signposting/linksets/" + parentCommunity.getID() + "/json") - .header("Accept", "application/linkset+json")) + getClient().perform(get("/signposting/linksets/" + parentCommunity.getID() + "/json")) .andExpect(status().isNotFound()); } @@ -374,12 +348,10 @@ public void findOneItemLsetLinksets() throws Exception { String itemRelation = "<" + url + "/bitstreams/" + bitstream1.getID() + "/download> ; rel=\"item\" ; " + "type=\"text/plain\" ; anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; - String typeRelation = " ; rel=\"type\" ; type=\"text/html\" ; anchor=\"" + - url + "/entities/publication/" + - item.getID() + "\" ,"; + String typeRelation = " ; rel=\"type\" ; anchor=\"" + + url + "/entities/publication/" + item.getID() + "\" ,"; - getClient().perform(get("/signposting/linksets/" + item.getID()) - .header("Accept", "application/linkset")) + getClient().perform(get("/signposting/linksets/" + item.getID())) .andExpect(content().string(Matchers.containsString(siteAsRelation))) .andExpect(content().string(Matchers.containsString(itemRelation))) .andExpect(content().string(Matchers.containsString(typeRelation))); @@ -393,8 +365,7 @@ public void findOneUnDiscoverableItemLsetLinksets() throws Exception { .build(); context.restoreAuthSystemState(); - getClient().perform(get("/signposting/linksets/" + item.getID()) - .header("Accept", "application/linkset")) + getClient().perform(get("/signposting/linksets/" + item.getID())) .andExpect(status().isUnauthorized()); } @@ -454,8 +425,7 @@ public void findTypedLinkForItemWithAuthor() throws Exception { .andExpect(jsonPath("$", Matchers.hasSize(5))) .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(orcidPattern, orcidValue) + "' " + - "&& @.rel == 'author' " + - "&& @.type == 'text/html')]").exists()) + "&& @.rel == 'author')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(doiPattern, doi) + "' " + "&& @.rel == 'cite-as')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + dcIdentifierUriMetadataValue + "' " + @@ -464,8 +434,7 @@ public void findTypedLinkForItemWithAuthor() throws Exception { "&& @.rel == 'item' " + "&& @.type == 'text/plain')]").exists()) .andExpect(jsonPath("$[?(@.href == 'https://schema.org/AboutPage' " + - "&& @.rel == 'type' " + - "&& @.type == 'text/html')]").exists()); + "&& @.rel == 'type')]").exists()); } @Test @@ -549,8 +518,7 @@ public void findTypedLinkForBitstreamWithType() throws Exception { "' && @.rel == 'linkset' " + "&& @.type == 'application/linkset+json')]").exists()) .andExpect(jsonPath("$[?(@.href == 'https://schema.org/ScholarlyArticle' " + - "&& @.rel == 'type' " + - "&& @.type == 'text/html')]").exists()); + "&& @.rel == 'type')]").exists()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); From ca7c75987e106ac99c335960e95c39bcf77882be Mon Sep 17 00:00:00 2001 From: damian Date: Tue, 13 Jun 2023 19:58:42 +0200 Subject: [PATCH 293/686] Items count are now disabled by default. Naming changes. 
--- .../src/main/java/org/dspace/browse/ItemCounter.java | 8 +++++--- .../src/main/java/org/dspace/content/Collection.java | 4 ++-- .../java/org/dspace/content/CollectionServiceImpl.java | 2 +- .../src/main/java/org/dspace/content/Community.java | 4 ++-- .../java/org/dspace/content/CommunityServiceImpl.java | 2 +- .../org/dspace/content/service/CollectionService.java | 2 +- .../java/org/dspace/content/service/CommunityService.java | 2 +- .../dspace/app/rest/converter/CollectionConverter.java | 2 +- .../org/dspace/app/rest/converter/CommunityConverter.java | 3 ++- dspace/config/dspace.cfg | 4 ++-- 10 files changed, 18 insertions(+), 15 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java index 1718e07ab2ca..20c43fc37298 100644 --- a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java +++ b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java @@ -75,7 +75,7 @@ public ItemCounter(Context context) throws ItemCountException { this.dao = ItemCountDAOFactory.getInstance(this.context); this.itemService = ContentServiceFactory.getInstance().getItemService(); this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - this.showStrengths = configurationService.getBooleanProperty("webui.strengths.show", true); + this.showStrengths = configurationService.getBooleanProperty("webui.strengths.show", false); this.useCache = configurationService.getBooleanProperty("webui.strengths.cache", true); } @@ -96,7 +96,9 @@ public static ItemCounter getInstance() throws ItemCountException { /** * Get the count of the items in the given container. If the configuration * value webui.strengths.show is equal to 'true' this method will return all - * archived items. If the configuration value webui.strengths.cache + * archived items. If the configuration value webui.strengths.show is equal to + * 'false' this method will return -1. + * If the configuration value webui.strengths.cache * is equal to 'true' this will return the cached value if it exists. * If it is equal to 'false' it will count the number of items * in the container in real time. 
@@ -107,7 +109,7 @@ public static ItemCounter getInstance() throws ItemCountException { */ public int getCount(DSpaceObject dso) throws ItemCountException { if (!showStrengths) { - return 0; + return -1; } if (useCache) { diff --git a/dspace-api/src/main/java/org/dspace/content/Collection.java b/dspace-api/src/main/java/org/dspace/content/Collection.java index 6caae7f47b24..53b63dbef1fa 100644 --- a/dspace-api/src/main/java/org/dspace/content/Collection.java +++ b/dspace-api/src/main/java/org/dspace/content/Collection.java @@ -342,9 +342,9 @@ private CollectionService getCollectionService() { * * @return int */ - public int countArchivedItem() { + public int countArchivedItems() { try { - return collectionService.countArchivedItem(this); + return collectionService.countArchivedItems(this); } catch (ItemCountException e) { throw new RuntimeException(e); } diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index a5fcf0c1aa37..84ca1692ccf2 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -1077,7 +1077,7 @@ public List findAllCollectionsByEntityType(Context context, String e * @throws ItemCountException */ @Override - public int countArchivedItem(Collection collection) throws ItemCountException { + public int countArchivedItems(Collection collection) throws ItemCountException { return ItemCounter.getInstance().getCount(collection); } } diff --git a/dspace-api/src/main/java/org/dspace/content/Community.java b/dspace-api/src/main/java/org/dspace/content/Community.java index 39705369e3d6..dd6d978936df 100644 --- a/dspace-api/src/main/java/org/dspace/content/Community.java +++ b/dspace-api/src/main/java/org/dspace/content/Community.java @@ -270,9 +270,9 @@ private CommunityService getCommunityService() { * * @return int */ - public int countArchivedItem() { + public int countArchivedItems() { try { - return communityService.getArchivedItems(this); + return communityService.countArchivedItems(this); } catch (ItemCountException e) { throw new RuntimeException(e); } diff --git a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java index f693c62e9247..15ac1c58a690 100644 --- a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java @@ -719,7 +719,7 @@ public int countTotal(Context context) throws SQLException { * @throws ItemCountException */ @Override - public int getArchivedItems(Community community) throws ItemCountException { + public int countArchivedItems(Community community) throws ItemCountException { return ItemCounter.getInstance().getCount(community); } } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index d7f47882c085..90db5c731402 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -478,5 +478,5 @@ public List findAllCollectionsByEntityType(Context context, String e * @return total collection archived items * @throws ItemCountException */ - int countArchivedItem(Collection collection) throws ItemCountException; + int countArchivedItems(Collection collection) 
throws ItemCountException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java index 9326da2203e9..c089bcec8df1 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java @@ -301,5 +301,5 @@ public void removeSubcommunity(Context context, Community parentCommunity, Commu * @return total community archived items * @throws ItemCountException */ - int getArchivedItems(Community community) throws ItemCountException; + int countArchivedItems(Community community) throws ItemCountException; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java index a5d06b5b3b45..e9b6aa03b85a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java @@ -26,7 +26,7 @@ public class CollectionConverter extends DSpaceObjectConverter Date: Wed, 14 Jun 2023 10:17:20 -0400 Subject: [PATCH 294/686] Rework message to make it more useful, more businesslike, not expose author's email by default. --- .../app/requestitem/RequestItemEmailNotifier.java | 2 +- .../org/dspace/app/rest/utils/ApplicationConfig.java | 2 +- dspace/config/emails/request_item.granted | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index c44a65d95462..1f0c2683da5d 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -178,7 +178,7 @@ public void sendResponse(Context context, RequestItem ri, String subject, Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "request_item.granted")); email.addArgument(ri.getReqName()); // {0} requestor's name - email.addArgument(ri.getItem().getID().toString()); // {1} URL of the requested Item + email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {1} URL of the requested Item email.addArgument(ri.getItem().getName()); // {2} title of the requested Item email.addArgument(grantorName); // {3} name of the grantor email.addArgument(grantorAddress); // {4} email of the grantor diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java index 492d650cab0f..23e217a5dd42 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ApplicationConfig.java @@ -27,7 +27,7 @@ // bean which is in the scope of both will be duplicated. dspace-services makes // its context the parent of this one. If a bean is explicitly configured in // the parent, it won't be so configured in this context and you may have -// trouble. IOW be careful what you add here. +// trouble. Be careful what you add here. 
@ComponentScan( { "org.dspace.app.rest.converter", "org.dspace.app.rest.repository", diff --git a/dspace/config/emails/request_item.granted b/dspace/config/emails/request_item.granted index 33d4e9a43376..290ae89b300c 100644 --- a/dspace/config/emails/request_item.granted +++ b/dspace/config/emails/request_item.granted @@ -11,16 +11,16 @@ #set($subject = 'Request for Copy of Restricted Document is Granted') Dear ${params[0]}: -In response to your request I have the pleasure to send you in attachment a copy of the file(s) concerning the document: +Your request for a copy of the file(s) from the below document has been approved by ${params[3]}. You may find the requested file(s) attached. - ${params[2]} - ${params[1]} - -of which I am an author. + ${params[2]} + ${params[1]} #if( $params[5] ) +An additional message from ${params[3]} follows: + ${params[5]} #end Best regards, -${params[3]} <${params[4]}> +The ${config.get('dspace.name')} Team From 411ac4a9a27262473e4e8965b771a72c246650fb Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Wed, 14 Jun 2023 10:21:01 -0400 Subject: [PATCH 295/686] Clarify template comments. --- dspace/config/emails/request_item.granted | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace/config/emails/request_item.granted b/dspace/config/emails/request_item.granted index 290ae89b300c..37ee5c29bd0c 100644 --- a/dspace/config/emails/request_item.granted +++ b/dspace/config/emails/request_item.granted @@ -3,10 +3,10 @@ ## ## Parameters: ## {0} name of the requestor -## {1} unique ID of the requested Item +## {1} Handle URL of the requested Item ## {2} title of the requested Item ## {3} name of the grantor -## {4} email address of the grantor +## {4} email address of the grantor (unused) ## {5} custom message sent by the grantor. #set($subject = 'Request for Copy of Restricted Document is Granted') Dear ${params[0]}: From 9caba7044b2c13b76549457c797e6a30194412a5 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Wed, 14 Jun 2023 10:53:49 -0400 Subject: [PATCH 296/686] Remove test for removed content. 
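The deletion below follows from the two template patches above: the assertion being removed checked that the reply body contained the helpdesk (grantor) address, i.e. parameter {4}, which the reworked request_item.granted template no longer prints. For readers tracing the placeholders, a condensed sketch of how sendResponse() populates them after these patches; the argument expressions are taken from the RequestItemEmailNotifier hunk above, while the local names for {2}-{5} are illustrative:

    // Condensed excerpt, not a complete method.
    email.addArgument(ri.getReqName());                                   // {0} name of the requestor
    email.addArgument(handleService.getCanonicalForm(item.getHandle()));  // {1} Handle URL of the requested Item
    email.addArgument(item.getName());                                    // {2} title of the requested Item
    email.addArgument(grantorName);                                       // {3} name of the grantor
    email.addArgument(grantorAddress);                                    // {4} email address of the grantor (unused in the template)
    email.addArgument(message);                                           // {5} custom message sent by the grantor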
--- .../dspace/app/requestitem/RequestItemEmailNotifierTest.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java index 36525a912a31..5d0069e78bbb 100644 --- a/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java @@ -164,9 +164,6 @@ public void testSendResponse() throws Exception { assertThat("Body should be a single text bodypart", content, instanceOf(String.class)); - assertThat("Should contain the helpdesk address", - (String)content, containsString(HELPDESK_ADDRESS)); - assertThat("Should contain the helpdesk name", (String)content, containsString(HELPDESK_NAME)); From bbb75df3a14a6a03a6a972ea74f4669e78debc7f Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 19 May 2023 13:04:10 -0500 Subject: [PATCH 297/686] Disable DTD parsing in all external source plugins to avoid XXEs --- .../dspace/ctask/general/MetadataWebService.java | 3 ++- .../external/provider/orcid/xml/Converter.java | 14 +++++++++++--- .../CiniiImportMetadataSourceServiceImpl.java | 6 ++++++ .../EpoImportMetadataSourceServiceImpl.java | 6 ++++++ .../PubmedImportMetadataSourceServiceImpl.java | 5 +++++ .../PubmedEuropeMetadataSourceServiceImpl.java | 4 ++++ .../ScopusImportMetadataSourceServiceImpl.java | 4 ++++ .../WOSImportMetadataSourceServiceImpl.java | 4 ++++ 8 files changed, 42 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java index f7ab18c01e54..5891fa017cb0 100644 --- a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java +++ b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java @@ -207,9 +207,10 @@ public void init(Curator curator, String taskId) throws IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); try { - // disallow DTD parsing to ensure no XXE attacks can occur. 
+ // disallow DTD parsing to ensure no XXE attacks can occur // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + factory.setXIncludeAware(false); docBuilder = factory.newDocumentBuilder(); } catch (ParserConfigurationException pcE) { log.error("caught exception: " + pcE); diff --git a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java index 8f48cda712bc..756b8654f285 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java @@ -12,6 +12,9 @@ import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; import org.xml.sax.SAXException; @@ -28,11 +31,16 @@ public abstract class Converter { protected Object unmarshall(InputStream input, Class type) throws SAXException, URISyntaxException { try { + XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory(); + // disallow DTD parsing to ensure no XXE attacks can occur + xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false); + XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(input); + JAXBContext context = JAXBContext.newInstance(type); Unmarshaller unmarshaller = context.createUnmarshaller(); - return unmarshaller.unmarshal(input); - } catch (JAXBException e) { - throw new RuntimeException("Unable to unmarshall orcid message" + e); + return unmarshaller.unmarshal(xmlStreamReader); + } catch (JAXBException | XMLStreamException e) { + throw new RuntimeException("Unable to unmarshall orcid message: " + e); } } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java index 5eff46c790e4..587ad5b25838 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java @@ -303,6 +303,8 @@ protected List search(String id, String appId) private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); return root.getChildren(); @@ -356,6 +358,8 @@ private List getCiniiIds(String appId, Integer maxResult, String author, String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); int url_len = this.url.length() - 1; SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); List namespaces = Arrays.asList( @@ -418,6 +422,8 @@ private Integer countCiniiElement(String appId, Integer maxResult, String author String response = 
liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); List namespaces = Arrays diff --git a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java index 7240e356e371..fbae302bca6a 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java @@ -398,6 +398,8 @@ private Integer countDocument(String bearer, String query) { String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); @@ -435,6 +437,8 @@ private List searchDocumentIds(String bearer, String query, int s String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); @@ -486,6 +490,8 @@ private List searchDocument(String bearer, String id, String docTy private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); List namespaces = Arrays.asList(Namespace.getNamespace("ns", "http://www.epo.org/exchange")); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java index 933d6b1446d8..a6cfa625bbcf 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java @@ -351,6 +351,11 @@ public Collection call() throws Exception { private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // Disallow external entities & entity expansion to protect against XXE attacks + // (NOTE: We receive errors if we disable all DTDs for PubMed, so this is the best we can do) + saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false); + saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + saxBuilder.setExpandEntities(false); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = 
document.getRootElement(); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java index 1ec0da74206e..92d7d9fbd3fe 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java @@ -294,6 +294,8 @@ public Integer count(String query) throws URISyntaxException, ClientProtocolExce String response = liveImportClient.executeHttpGetRequest(1000, buildURI(1, query), params); SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); Element element = root.getChild("hitCount"); @@ -365,6 +367,8 @@ public List search(String query, Integer size, Integer start) thro String cursorMark = StringUtils.EMPTY; if (StringUtils.isNotBlank(response)) { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); XPathFactory xpfac = XPathFactory.instance(); XPathExpression xPath = xpfac.compile("//responseWrapper/resultList/result", diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java index d0c2fb078a2c..944d467e3156 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java @@ -202,6 +202,8 @@ public Integer call() throws Exception { String response = liveImportClient.executeHttpGetRequest(timeout, url, params); SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); @@ -377,6 +379,8 @@ private Map getRequestParameters(String query, String viewMode, private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); List records = root.getChildren("entry",Namespace.getNamespace("http://www.w3.org/2005/Atom")); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java index 2ccdc12b8db2..f550b659952b 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java @@ -147,6 +147,8 @@ public Integer call() throws Exception { String response = liveImportClient.executeHttpGetRequest(timeout, url, params); SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); XPathExpression xpath = XPathFactory.instance().compile("//*[@name=\"RecordsFound\"]", @@ -285,6 +287,8 @@ private boolean isIsi(String query) { private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); String cData = XPathFactory.instance().compile("//*[@name=\"Records\"]", From 7c7f3e2853bf6f27c09686684b83c0a798a45211 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Wed, 14 Jun 2023 23:47:36 +0200 Subject: [PATCH 298/686] 102124: Move content-disposition test config from local.cfg to IT itself --- dspace-api/src/test/data/dspaceFolder/config/local.cfg | 6 ------ .../java/org/dspace/app/rest/BitstreamRestControllerIT.java | 6 ++++++ 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 144116f212eb..3c19a68e9fd1 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -144,9 +144,3 @@ authentication-ip.Student = 6.6.6.6 useProxies = true proxies.trusted.ipranges = 7.7.7.7 proxies.trusted.include_ui_ip = true - -########################### -# BITSTREAM CONFIGURATION # -########################### -# Files with these mimetypes, extensions will never be opened inline, and should be downloaded -webui.content_disposition_format = text/richtext, text/xml, txt diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index 9b4aeed1e208..e2166ad42591 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -972,6 +972,12 @@ public void updateBitstreamFormatAdmin() throws Exception { @Test public void checkContentDispositionOfFormats() throws Exception { + configurationService.setProperty("webui.content_disposition_format", new String[] { + "text/richtext", + "text/xml", + "txt" + }); + context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context).build(); Collection collection = CollectionBuilder.createCollection(context, community).build(); From 1cb60a90183e4d874a05b24ad3f0320131553c53 Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Wed, 14 Jun 2023 23:48:27 +0200 Subject: [PATCH 299/686] Hide publicdomain by default since it's now the same as CC0 More info: https://api.creativecommons.org/docs/readme_15.html#changes-since-1-0 --- dspace/config/dspace.cfg | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace/config/dspace.cfg 
b/dspace/config/dspace.cfg index 4e6a6799274b..b8a149599b15 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -968,7 +968,7 @@ metadata.hide.person.email = true #### Creative Commons settings ###### # The url to the web service API -cc.api.rooturl = http://api.creativecommons.org/rest/1.5 +cc.api.rooturl = https://api.creativecommons.org/rest/1.5 # Metadata field to hold CC license URI of selected license cc.license.uri = dc.rights.uri @@ -985,12 +985,12 @@ cc.submit.addbitstream = true # A list of license classes that should be excluded from selection process # class names - comma-separated list - must exactly match what service returns. # At time of implementation, these are: -# publicdomain - "Public Domain" +# publicdomain - "Public Domain" (this is now the same as CC0) # standard - "Creative Commons" # recombo - "Sampling" # zero - "CC0" # mark - "Public Domain Mark" -cc.license.classfilter = recombo, mark +cc.license.classfilter = publicdomain, recombo, mark # Jurisdiction of the creative commons license -- is it ported or not? # Use the key from the url seen in the response from the api call, From 8adb16f9453edb510839d97f9ad86caf15362e5c Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Thu, 15 Jun 2023 17:01:00 -0400 Subject: [PATCH 300/686] Invent a rejection letter --- .../requestitem/RequestItemEmailNotifier.java | 2 +- .../RequestItemEmailNotifierTest.java | 103 ++++++++++++++++-- dspace/config/emails/request_item.rejected | 26 +++++ 3 files changed, 123 insertions(+), 8 deletions(-) create mode 100644 dspace/config/emails/request_item.rejected diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 1f0c2683da5d..6499c45a7830 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -176,7 +176,7 @@ public void sendResponse(Context context, RequestItem ri, String subject, // Build an email back to the requester. Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), - "request_item.granted")); + ri.isAccept_request() ? 
"request_item.granted" : "request_item.rejected")); email.addArgument(ri.getReqName()); // {0} requestor's name email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {1} URL of the requested Item email.addArgument(ri.getItem().getName()); // {2} title of the requested Item diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java index 5d0069e78bbb..713e007c58a2 100644 --- a/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java @@ -48,6 +48,13 @@ public class RequestItemEmailNotifierTest public static final String TRANSPORT_CLASS_KEY = "mail.smtp.class"; + private static final String REQUESTOR_ADDRESS = "mhwood@wood.net"; + private static final String REQUESTOR_NAME = "Mark Wood"; + private static final String HELPDESK_ADDRESS = "help@example.com"; + private static final String HELPDESK_NAME = "Help Desk"; + private static final String TEST_MESSAGE = "Message"; + private static final String DUMMY_PROTO = "dummy"; + private static ConfigurationService configurationService; private static BitstreamService bitstreamService; private static HandleService handleService; @@ -86,13 +93,6 @@ public void testSendRequest() throws Exception { */ @Test public void testSendResponse() throws Exception { - final String REQUESTOR_ADDRESS = "mhwood@wood.net"; - final String REQUESTOR_NAME = "Mark Wood"; - final String HELPDESK_ADDRESS = "help@example.com"; - final String HELPDESK_NAME = "Help Desk"; - final String TEST_MESSAGE = "Message"; - final String DUMMY_PROTO = "dummy"; - // Create some content to send. context.turnOffAuthorisationSystem(); Community com = CommunityBuilder.createCommunity(context) @@ -171,6 +171,95 @@ public void testSendResponse() throws Exception { (String)content, containsString(TEST_MESSAGE)); } + /** + * Test of sendResponse method -- rejection case. + * @throws java.lang.Exception passed through. + */ + @Test + public void testSendRejection() + throws Exception { + // Create some content to send. + context.turnOffAuthorisationSystem(); + Community com = CommunityBuilder.createCommunity(context) + .withName("Top Community") + .build(); + Collection col = CollectionBuilder.createCollection(context, com) + .build(); + Item item = ItemBuilder.createItem(context, col) + .withTitle("Test Item") + .build(); + context.restoreAuthSystemState(); + + // Create a request to which we can respond. + RequestItem ri = new RequestItem(); + ri.setAccept_request(false); + ri.setItem(item); + ri.setAllfiles(true); + ri.setReqEmail(REQUESTOR_ADDRESS); + ri.setReqName(REQUESTOR_NAME); + + // Install a fake transport for RFC2822 email addresses. + Session session = DSpaceServicesFactory.getInstance().getEmailService().getSession(); + Provider transportProvider = new Provider(Provider.Type.TRANSPORT, + DUMMY_PROTO, JavaMailTestTransport.class.getCanonicalName(), + "DSpace", "1.0"); + session.addProvider(transportProvider); + session.setProvider(transportProvider); + session.setProtocolForAddress("rfc822", DUMMY_PROTO); + + // Configure the help desk strategy. + configurationService.setProperty("mail.helpdesk", HELPDESK_ADDRESS); + configurationService.setProperty("mail.helpdesk.name", HELPDESK_NAME); + configurationService.setProperty("request.item.helpdesk.override", "true"); + + // Ensure that mail is "sent". 
+ configurationService.setProperty("mail.server.disabled", "false"); + + // Instantiate and initialize the unit, using the "help desk" strategy. + RequestItemEmailNotifier requestItemEmailNotifier + = new RequestItemEmailNotifier( + DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName(RequestItemHelpdeskStrategy.class.getName(), + RequestItemAuthorExtractor.class)); + requestItemEmailNotifier.bitstreamService = bitstreamService; + requestItemEmailNotifier.configurationService = configurationService; + requestItemEmailNotifier.handleService = handleService; + requestItemEmailNotifier.requestItemService = requestItemService; + + // Test the unit. Template supplies the Subject: value + requestItemEmailNotifier.sendResponse(context, ri, null, TEST_MESSAGE); + + // Evaluate the test results. + + // Check the To: address. + Address[] myAddresses = JavaMailTestTransport.getAddresses(); + assertEquals("Should have one To: address.", + myAddresses.length, 1); + assertThat("To: should be an Internet address", + myAddresses[0], instanceOf(InternetAddress.class)); + String address = ((InternetAddress)myAddresses[0]).getAddress(); + assertEquals("To: address should match requestor.", + ri.getReqEmail(), address); + + // Check the message body. + Message myMessage = JavaMailTestTransport.getMessage(); + + Object content = myMessage.getContent(); + assertThat("Body should be a single text bodypart", + content, instanceOf(String.class)); + + assertThat("Should contain the helpdesk name", + (String)content, containsString(HELPDESK_NAME)); + + assertThat("Should contain the test custom message", + (String)content, containsString(TEST_MESSAGE)); + + // FIXME Note that this depends on the content of the rejection template! + assertThat("Should contain the word 'denied'.", + (String)content, containsString("denied")); + } + /** * Test of requestOpenAccess method, of class RequestItemEmailNotifier. * @throws java.lang.Exception passed through. diff --git a/dspace/config/emails/request_item.rejected b/dspace/config/emails/request_item.rejected new file mode 100644 index 000000000000..c5a13860b648 --- /dev/null +++ b/dspace/config/emails/request_item.rejected @@ -0,0 +1,26 @@ +## Sent to the person requesting a copy of a restricted document when the +## request is denied. +## +## Parameters: +## {0} name of the requestor +## {1} Handle URL of the requested Item +## {2} title of the requested Item +## {3} name of the grantor +## {4} email address of the grantor (unused) +## {5} custom message sent by the grantor. +#set($subject = 'Request for Copy of Restricted Document is Denied') +Dear ${params[0]}: + +Your request for a copy of the file(s) from the below document has been denied by ${params[3]}. 
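## Note (Velocity comment, does not render in the sent mail): RequestItemEmailNotifier.sendResponse()
## selects this template when the request's isAccept_request() flag is false, and falls back to the
## "request_item.granted" template otherwise. The #set($subject) line above supplies the Subject:
## header whenever the caller passes a null subject, as RequestItemEmailNotifierTest.testSendRejection
## does. That test also asserts that the body contains the word "denied", so rewording the sentence
## above requires updating that assertion as well.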
+ + ${params[2]} + ${params[1]} +#if( $params[5] ) + +An additional message from ${params[3]} follows: + +${params[5]} +#end + +Best regards, +The ${config.get('dspace.name')} Team From 55c07e90cdc1cb5eff86b83333a8fb32cdcf5ff4 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Fri, 16 Jun 2023 01:53:48 +0200 Subject: [PATCH 301/686] [DURACOM-156] Singposting: feedback after initial deployment --- .../controller/LinksetRestController.java | 8 +- .../LinksetHttpMessageConverter.java | 82 ------------------- .../LinksetRestMessageConverter.java | 49 +++++++++++ .../processor/item/ItemLinksetProcessor.java | 58 +++++++++++++ .../controller/LinksetRestControllerIT.java | 76 +++++++++++++++-- dspace/config/spring/rest/signposting.xml | 5 +- 6 files changed, 183 insertions(+), 95 deletions(-) delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java index 8089c518fff0..7932d94f7918 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -18,6 +18,7 @@ import javax.servlet.http.HttpServletRequest; import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.signposting.converter.LinksetRestMessageConverter; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRest; import org.dspace.app.rest.signposting.model.TypedLinkRest; @@ -100,9 +101,8 @@ public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) } @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") - @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, - method = RequestMethod.GET, produces = "application/linkset") - public LinksetRest getLset(HttpServletRequest request, @PathVariable UUID uuid) { + @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) + public String getLset(HttpServletRequest request, @PathVariable UUID uuid) { try { Context context = ContextUtil.obtainContext(request); @@ -127,7 +127,7 @@ public LinksetRest getLset(HttpServletRequest request, @PathVariable UUID uuid) linksetRest.getLinksetNodes().add(linksetNode); } } - return linksetRest; + return LinksetRestMessageConverter.convert(linksetRest); } catch (SQLException e) { throw new RuntimeException(e); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java deleted file mode 100644 index e5c187c9554b..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetHttpMessageConverter.java +++ /dev/null @@ -1,82 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the 
source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.converter; - -import static java.lang.String.format; -import static java.util.Objects.nonNull; -import static org.apache.commons.lang.StringUtils.isNotBlank; - -import java.io.IOException; -import java.lang.reflect.Type; - -import org.apache.commons.lang.NotImplementedException; -import org.dspace.app.rest.signposting.model.LinksetRest; -import org.springframework.http.HttpInputMessage; -import org.springframework.http.HttpOutputMessage; -import org.springframework.http.MediaType; -import org.springframework.http.converter.AbstractGenericHttpMessageConverter; -import org.springframework.http.converter.HttpMessageNotReadableException; -import org.springframework.http.converter.HttpMessageNotWritableException; - -/** - * Converter for converting LinksetRest message into application/linkset format. - */ -public class LinksetHttpMessageConverter extends AbstractGenericHttpMessageConverter { - - public LinksetHttpMessageConverter() { - super(MediaType.valueOf("application/linkset")); - } - - @Override - protected void writeInternal(LinksetRest linksetRest, Type type, HttpOutputMessage outputMessage) - throws IOException, HttpMessageNotWritableException { - StringBuilder responseBody = new StringBuilder(); - linksetRest.getLinksetNodes().forEach(linksetNodes -> { - if (isNotBlank(linksetNodes.getLink())) { - responseBody.append(format("<%s> ", linksetNodes.getLink())); - } - if (nonNull(linksetNodes.getRelation())) { - responseBody.append(format("; rel=\"%s\" ", linksetNodes.getRelation().getName())); - } - if (isNotBlank(linksetNodes.getType())) { - responseBody.append(format("; type=\"%s\" ", linksetNodes.getType())); - } - if (isNotBlank(linksetNodes.getAnchor())) { - responseBody.append(format("; anchor=\"%s\" ", linksetNodes.getAnchor())); - } - responseBody.append(", "); - }); - outputMessage.getBody().write(responseBody.toString().trim().getBytes()); - outputMessage.getBody().flush(); - } - - @Override - protected LinksetRest readInternal(Class clazz, HttpInputMessage inputMessage) - throws HttpMessageNotReadableException { - throw new NotImplementedException(); - } - - @Override - protected boolean canRead(MediaType mediaType) { - return false; - } - - @Override - public boolean canWrite(Class clazz, MediaType mediaType) { - boolean isAppropriateClass = LinksetRest.class.isAssignableFrom(clazz); - boolean isAppropriateMediaType = getSupportedMediaTypes().stream() - .anyMatch(supportedType -> supportedType.isCompatibleWith(mediaType)); - return isAppropriateClass && isAppropriateMediaType; - } - - @Override - public LinksetRest read(Type type, Class contextClass, HttpInputMessage inputMessage) - throws IOException, HttpMessageNotReadableException { - throw new NotImplementedException(); - } -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java new file mode 100644 index 000000000000..03c27897b9cd --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.app.rest.signposting.converter; + +import static java.lang.String.format; +import static java.util.Objects.nonNull; +import static org.apache.commons.lang.StringUtils.isNotBlank; + +import org.dspace.app.rest.signposting.model.LinksetRest; + +/** + * Converter for converting LinksetRest message into application/linkset format. + */ +public class LinksetRestMessageConverter { + + private LinksetRestMessageConverter() { + } + + /** + * Converts LinksetRest object into string of application/linkset format. + * + * @param linksetRest linkset rest object + * @return string of application/linkset format. + */ + public static String convert(LinksetRest linksetRest) { + StringBuilder responseBody = new StringBuilder(); + linksetRest.getLinksetNodes().forEach(linksetNodes -> { + if (isNotBlank(linksetNodes.getLink())) { + responseBody.append(format("<%s> ", linksetNodes.getLink())); + } + if (nonNull(linksetNodes.getRelation())) { + responseBody.append(format("; rel=\"%s\" ", linksetNodes.getRelation().getName())); + } + if (isNotBlank(linksetNodes.getType())) { + responseBody.append(format("; type=\"%s\" ", linksetNodes.getType())); + } + if (isNotBlank(linksetNodes.getAnchor())) { + responseBody.append(format("; anchor=\"%s\" ", linksetNodes.getAnchor())); + } + responseBody.append(", "); + }); + return responseBody.toString(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java new file mode 100644 index 000000000000..9008a28e29a6 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java @@ -0,0 +1,58 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the linkset relation. 
+ */ +public class ItemLinksetProcessor extends ItemSignpostingProcessor { + + private static final Logger log = Logger.getLogger(ItemLinksetProcessor.class); + + private final ConfigurationService configurationService; + + public ItemLinksetProcessor(FrontendUrlService frontendUrlService, + ConfigurationService configurationService) { + super(frontendUrlService); + this.configurationService = configurationService; + setRelation(LinksetRelationType.LINKSET); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + try { + if (item != null) { + String signpostingPath = configurationService.getProperty("signposting.path"); + String baseUrl = configurationService.getProperty("dspace.ui.url"); + + String linksetUrl = String.format("%s/%s/linksets/%s", baseUrl, signpostingPath, item.getID()); + String linksetJsonUrl = linksetUrl + "/json"; + String anchor = buildAnchor(context, item); + List links = List.of( + new LinksetNode(linksetUrl, getRelation(), "application/linkset", anchor), + new LinksetNode(linksetJsonUrl, getRelation(), "application/linkset+json", anchor) + ); + linksetNodes.addAll(links); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index 7375ee3322a6..7b3b6543ef3e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -103,6 +103,8 @@ public void findAllItemsLinksets() throws Exception { @Test public void findOneItemJsonLinksets() throws Exception { + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); context.turnOffAuthorisationSystem(); Item item = ItemBuilder.createItem(context, collection) .withTitle("Item Test") @@ -115,11 +117,23 @@ public void findOneItemJsonLinksets() throws Exception { .andExpect(jsonPath("$.linkset", Matchers.hasSize(1))) .andExpect(jsonPath("$.linkset[0].cite-as[0].href", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + ; } @Test public void findOneItemJsonLinksetsWithType() throws Exception { + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); String articleUri = mapConverterDSpaceToSchemaOrgUri.getValue("Article"); context.turnOffAuthorisationSystem(); Item item = ItemBuilder.createItem(context, collection) @@ -140,12 +154,23 @@ public void findOneItemJsonLinksetsWithType() throws Exception { .andExpect(jsonPath("$.linkset[0].type[0].href", 
Matchers.hasToString("https://schema.org/AboutPage"))) .andExpect(jsonPath("$.linkset[0].type[1].href", - Matchers.hasToString(articleUri))); + Matchers.hasToString(articleUri))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))); } @Test public void findOneItemJsonLinksetsWithLicence() throws Exception { String licenceUrl = "https://exmple.com/licence"; + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); context.turnOffAuthorisationSystem(); Item item = ItemBuilder.createItem(context, collection) .withTitle("Item Test") @@ -160,7 +185,16 @@ public void findOneItemJsonLinksetsWithLicence() throws Exception { .andExpect(jsonPath("$.linkset[0].type[0].href", Matchers.hasToString("https://schema.org/AboutPage"))) .andExpect(jsonPath("$.linkset[0].license[0].href", - Matchers.hasToString(licenceUrl))); + Matchers.hasToString(licenceUrl))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))); } @Test @@ -194,6 +228,7 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { context.restoreAuthSystemState(); String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", @@ -209,7 +244,16 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { .andExpect(jsonPath("$.linkset[0].item[1].type", Matchers.hasToString(bitstream2MimeType))) .andExpect(jsonPath("$.linkset[0].anchor", - Matchers.hasToString(url + "/entities/publication/" + item.getID()))); + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))); } @Test @@ -343,6 +387,7 @@ public void findOneItemLsetLinksets() throws Exception { context.restoreAuthSystemState(); String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); String siteAsRelation = "<" + MessageFormat.format(doiPattern, 
doi) + "> ; rel=\"cite-as\" ; anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; String itemRelation = "<" + url + "/bitstreams/" + bitstream1.getID() + @@ -350,11 +395,19 @@ public void findOneItemLsetLinksets() throws Exception { item.getID() + "\" ,"; String typeRelation = " ; rel=\"type\" ; anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; + String linksetRelation = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "> ; rel=\"linkset\" ; type=\"application/linkset\" ;" + + " anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; + String jsonLinksetRelation = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json> ; rel=\"linkset\" ; type=\"application/linkset+json\" ;" + + " anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; getClient().perform(get("/signposting/linksets/" + item.getID())) .andExpect(content().string(Matchers.containsString(siteAsRelation))) .andExpect(content().string(Matchers.containsString(itemRelation))) - .andExpect(content().string(Matchers.containsString(typeRelation))); + .andExpect(content().string(Matchers.containsString(typeRelation))) + .andExpect(content().string(Matchers.containsString(linksetRelation))) + .andExpect(content().string(Matchers.containsString(jsonLinksetRelation))); } @Test @@ -416,6 +469,7 @@ public void findTypedLinkForItemWithAuthor() throws Exception { context.restoreAuthSystemState(); String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); String dcIdentifierUriMetadataValue = itemService .getMetadataFirstValue(publication, "dc", "identifier", "uri", Item.ANY); @@ -423,7 +477,7 @@ public void findTypedLinkForItemWithAuthor() throws Exception { .header("Accept", "application/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$", - Matchers.hasSize(5))) + Matchers.hasSize(7))) .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(orcidPattern, orcidValue) + "' " + "&& @.rel == 'author')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(doiPattern, doi) + "' " + @@ -434,7 +488,15 @@ public void findTypedLinkForItemWithAuthor() throws Exception { "&& @.rel == 'item' " + "&& @.type == 'text/plain')]").exists()) .andExpect(jsonPath("$[?(@.href == 'https://schema.org/AboutPage' " + - "&& @.rel == 'type')]").exists()); + "&& @.rel == 'type')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + + publication.getID().toString() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + + publication.getID().toString() + "/json' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()); } @Test diff --git a/dspace/config/spring/rest/signposting.xml b/dspace/config/spring/rest/signposting.xml index c8bc63c6f0dd..ba34fc6ce212 100644 --- a/dspace/config/spring/rest/signposting.xml +++ b/dspace/config/spring/rest/signposting.xml @@ -3,8 +3,6 @@ xmlns:util="http://www.springframework.org/schema/util" xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/util https://www.springframework.org/schema/util/spring-util.xsd"> - - @@ -41,6 +39,9 @@ + + From 013b95ea2a8795e2f1f3a5a6eca3811457ce04b3 Mon Sep 17 
00:00:00 2001 From: Yana De Pauw Date: Fri, 16 Jun 2023 14:37:19 +0200 Subject: [PATCH 302/686] 102888: Implement feedback --- .../java/org/dspace/statistics/SolrLoggerServiceImpl.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index 63858ce5810e..7853c3e11abf 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -219,7 +219,7 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, @Override public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPerson currentUser, String referrer) { - if (solr == null || locationService == null) { + if (solr == null) { return; } initSolrYearCores(); @@ -266,7 +266,7 @@ public void postView(DSpaceObject dspaceObject, @Override public void postView(DSpaceObject dspaceObject, String ip, String userAgent, String xforwardedfor, EPerson currentUser, String referrer) { - if (solr == null || locationService == null) { + if (solr == null) { return; } initSolrYearCores(); From 240bfbfdc95d3cdba4fb3d1e62dc02d0ceb3ac08 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 16 Jun 2023 11:00:03 -0500 Subject: [PATCH 303/686] Fix to adjusting bundle and bitstream policies (item install, embargo lift) Apply DEFAULT_ITEM_READ to bundles, not DEFAULT_BITSTREAM_READ so that files can be listed if the item / default item is readable --- .../java/org/dspace/content/ItemServiceImpl.java | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 694e3a58365f..8d1ba14b2c4a 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -930,16 +930,23 @@ public void inheritCollectionDefaultPolicies(Context context, Item item, Collect @Override public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { - List defaultCollectionPolicies = authorizeService + // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files + // can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other + // policies or embargos applied + List defaultCollectionBundlePolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); + // Bitstreams should inherit from DEFAULT_BITSTREAM_READ + List defaultCollectionBitstreamPolicies = authorizeService .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); List defaultItemPolicies = authorizeService.findPoliciesByDSOAndType(context, item, ResourcePolicy.TYPE_CUSTOM); - if (defaultCollectionPolicies.size() < 1) { + if (defaultCollectionBitstreamPolicies.size() < 1) { throw new SQLException("Collection " + collection.getID() + " (" + collection.getHandle() + ")" + " has no default bitstream READ policies"); } + // TODO: should we also throw an exception if no DEFAULT_ITEM_READ? 
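        // Rough summary of the intended READ policy layout once this method completes, inferred from the
        // commit message above rather than spelled out in the code:
        //   Bundle    READ  <-  collection DEFAULT_ITEM_READ       (defaultCollectionBundlePolicies)
        //   Bitstream READ  <-  collection DEFAULT_BITSTREAM_READ  (defaultCollectionBitstreamPolicies)
        // plus any TYPE_CUSTOM policies carried over from the item (defaultItemPolicies), so that a user
        // who can read the Item can still list its bundles and file names even when the bitstreams
        // themselves are access restricted or embargoed.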
// remove all policies from bundles, add new ones // Remove bundles @@ -950,11 +957,12 @@ public void adjustBundleBitstreamPolicies(Context context, Item item, Collection authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION); authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_WORKFLOW); addCustomPoliciesNotInPlace(context, mybundle, defaultItemPolicies); - addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionPolicies); + addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies); for (Bitstream bitstream : mybundle.getBitstreams()) { // if come from InstallItem: remove all submission/workflow policies - removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, defaultCollectionPolicies); + removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, + defaultCollectionBitstreamPolicies); } } } From 3ff5eb61c0b7b80820dde771c96d7d18f8500858 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 16 Jun 2023 11:00:03 -0500 Subject: [PATCH 304/686] Fix to adjusting bundle and bitstream policies (item install, embargo lift) Apply DEFAULT_ITEM_READ to bundles, not DEFAULT_BITSTREAM_READ so that files can be listed if the item / default item is readable Co-authored-by: kshepherd Co-authored-by: tdonohue --- .../java/org/dspace/content/ItemTest.java | 72 +++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/content/ItemTest.java b/dspace-api/src/test/java/org/dspace/content/ItemTest.java index bae6ce9e1d90..d440597ec416 100644 --- a/dspace-api/src/test/java/org/dspace/content/ItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/ItemTest.java @@ -1393,6 +1393,78 @@ public void testInheritCollectionDefaultPolicies() throws Exception { assertTrue("testInheritCollectionDefaultPolicies 2", equals); } + // Test to verify DEFAULT_*_READ policies on collection inherit properly to Item/Bundle/Bitstream + @Test + public void testInheritCollectionDefaultPolicies_custom_default_groups() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new collection + Collection c = createCollection(); + // Create a custom group with DEFAULT_ITEM_READ privileges in this Collection + Group item_read_role = collectionService.createDefaultReadGroup(context, c, "ITEM", + Constants.DEFAULT_ITEM_READ); + // Create a custom group with DEFAULT_BITSTREAM_READ privileges in this Collection + Group bitstream_read_role = collectionService.createDefaultReadGroup(context, c, "BITSTREAM", + Constants.DEFAULT_BITSTREAM_READ); + context.restoreAuthSystemState(); + + // Verify that Collection's DEFAULT_ITEM_READ now uses the newly created group. + List defaultItemReadPolicies = + authorizeService.getPoliciesActionFilter(context, c, Constants.DEFAULT_ITEM_READ); + assertEquals("One DEFAULT_ITEM_READ policy", 1, defaultItemReadPolicies.size()); + assertEquals("DEFAULT_ITEM_READ group", item_read_role.getName(), + defaultItemReadPolicies.get(0).getGroup().getName()); + + // Verify that Collection's DEFAULT_BITSTREAM_READ now uses the newly created group. 
+ List defaultBitstreamReadPolicies = + authorizeService.getPoliciesActionFilter(context, c, Constants.DEFAULT_BITSTREAM_READ); + assertEquals("One DEFAULT_BITSTREAM_READ policy on Collection", 1, defaultBitstreamReadPolicies.size()); + assertEquals("DEFAULT_BITSTREAM_READ group", bitstream_read_role.getName(), + defaultBitstreamReadPolicies.get(0).getGroup().getName()); + + context.turnOffAuthorisationSystem(); + // Create a new Item in this Collection + WorkspaceItem workspaceItem = workspaceItemService.create(context, c, false); + Item item = workspaceItem.getItem(); + // Add a single Bitstream to the ORIGINAL bundle + File f = new File(testProps.get("test.bitstream").toString()); + Bitstream bitstream = itemService.createSingleBitstream(context, new FileInputStream(f), item); + context.restoreAuthSystemState(); + + // Allow Item WRITE perms + doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE, true); + // Inherit all default policies from Collection down to new Item + itemService.inheritCollectionDefaultPolicies(context, item, c); + + // Verify Item inherits DEFAULT_ITEM_READ group from Collection + List itemReadPolicies = authorizeService.getPoliciesActionFilter(context, item, Constants.READ); + assertEquals("One READ policy on Item", 1, itemReadPolicies.size()); + assertEquals("Item's READ group", item_read_role.getName(), + itemReadPolicies.get(0).getGroup().getName()); + + // Verify Bitstream inherits DEFAULT_BITSTREAM_READ group from Collection + List bitstreamReadPolicies = authorizeService.getPoliciesActionFilter(context, bitstream, + Constants.READ); + assertEquals("One READ policy on Bitstream", 1, bitstreamReadPolicies.size()); + assertEquals("Bitstream's READ group", bitstream_read_role.getName(), + bitstreamReadPolicies.get(0).getGroup().getName()); + + // Verify ORIGINAL Bundle inherits DEFAULT_ITEM_READ group from Collection + // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files + // can be listed (even if files are access restricted or embargoed) + List bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME); + Bundle originalBundle = bundles.get(0); + List bundleReadPolicies = authorizeService.getPoliciesActionFilter(context, originalBundle, + Constants.READ); + assertEquals("One READ policy on Bundle", 1, bundleReadPolicies.size()); + assertEquals("Bundles's READ group", item_read_role.getName(), + bundleReadPolicies.get(0).getGroup().getName()); + + // Cleanup after ourselves. Delete created collection & all content under it + context.turnOffAuthorisationSystem(); + collectionService.delete(context, c); + context.restoreAuthSystemState(); + } + /** * Test of move method, of class Item. */ From 7dd34ccc5009180dab15482bd44551ea77ad4d45 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Jun 2023 20:23:12 +0000 Subject: [PATCH 305/686] Bump guava from 31.0.1-jre to 32.0.0-jre Bumps [guava](https://github.com/google/guava) from 31.0.1-jre to 32.0.0-jre. - [Release notes](https://github.com/google/guava/releases) - [Commits](https://github.com/google/guava/commits) --- updated-dependencies: - dependency-name: com.google.guava:guava dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index deea90890c70..f8e873c8d4d5 100644 --- a/pom.xml +++ b/pom.xml @@ -1772,7 +1772,7 @@ com.google.guava guava - 31.0.1-jre + 32.0.0-jre From cf1257f20e3d9672c6d96eec4b1b949cf330e2b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Jun 2023 16:35:40 +0000 Subject: [PATCH 306/686] Bump netty-handler from 4.1.68.Final to 4.1.94.Final in /dspace-api Bumps [netty-handler](https://github.com/netty/netty) from 4.1.68.Final to 4.1.94.Final. - [Commits](https://github.com/netty/netty/compare/netty-4.1.68.Final...netty-4.1.94.Final) --- updated-dependencies: - dependency-name: io.netty:netty-handler dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- dspace-api/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index d41c51638c3a..b21a76f352e5 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -876,7 +876,7 @@ io.netty netty-handler - 4.1.68.Final + 4.1.94.Final io.netty From b52a4fb50dcda128b30b2aed55dfff180c0e9160 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 20 Jun 2023 13:23:49 -0500 Subject: [PATCH 307/686] Update all Netty dependencies to 4.1.94.Final Also had to add netty-transport-native-unix-common so that Solr doesn't throw dependency convergence issues. --- dspace-api/pom.xml | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index b21a76f352e5..c4c0b2182ac8 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -856,22 +856,27 @@ - io.netty netty-buffer - 4.1.68.Final + 4.1.94.Final io.netty netty-transport - 4.1.68.Final + 4.1.94.Final + + io.netty + netty-transport-native-unix-common + 4.1.94.Final + io.netty netty-common - 4.1.68.Final + 4.1.94.Final io.netty @@ -881,7 +886,7 @@ io.netty netty-codec - 4.1.68.Final + 4.1.94.Final org.apache.velocity From 907c45a455d365a1e57bafe749ab64240bcdc181 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Wed, 21 Jun 2023 17:20:01 +0200 Subject: [PATCH 308/686] [DURACOM-156] Singposting: feedback after initial deployment --- .../controller/LinksetRestController.java | 129 ++++++++---- .../converter/LinksetConverter.java | 12 +- .../converter/LinksetNodeConverter.java | 37 ---- .../LinksetRestMessageConverter.java | 30 +-- .../app/rest/signposting/model/Linkset.java | 24 +++ .../model/LinksetRelationType.java | 4 +- .../rest/signposting/model/LinksetRest.java | 12 -- .../model/MetadataConfiguration.java | 10 + .../rest/signposting/model/TypedLinkRest.java | 13 +- .../item/ItemDescribedbyProcessor.java | 88 ++++++++ .../item/ItemIdentifierProcessor.java | 53 +---- ...MetadataDescribesSignpostingProcessor.java | 53 +++++ .../MetadataSignpostingProcessor.java | 45 +++++ .../rest/signposting/utils/LinksetMapper.java | 4 +- .../controller/LinksetRestControllerIT.java | 189 +++++++++++++++--- dspace/config/spring/rest/signposting.xml | 21 +- 16 files changed, 541 insertions(+), 183 deletions(-) delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetNodeConverter.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java create mode 100644 
dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java index 7932d94f7918..eecdefe84253 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -12,18 +12,23 @@ import java.sql.SQLException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.UUID; import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; +import org.apache.log4j.Logger; import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.security.BitstreamMetadataReadPermissionEvaluatorPlugin; import org.dspace.app.rest.signposting.converter.LinksetRestMessageConverter; +import org.dspace.app.rest.signposting.model.Linkset; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRest; import org.dspace.app.rest.signposting.model.TypedLinkRest; import org.dspace.app.rest.signposting.processor.bitstream.BitstreamSignpostingProcessor; import org.dspace.app.rest.signposting.processor.item.ItemSignpostingProcessor; +import org.dspace.app.rest.signposting.processor.metadata.MetadataSignpostingProcessor; import org.dspace.app.rest.signposting.utils.LinksetMapper; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.rest.utils.Utils; @@ -58,6 +63,8 @@ @ConditionalOnProperty("signposting.enabled") public class LinksetRestController { + private static final Logger log = Logger.getLogger(LinksetRestController.class); + @Autowired private Utils utils; @Autowired @@ -66,6 +73,14 @@ public class LinksetRestController { private ItemService itemService; @Autowired private ConverterService converter; + @Autowired + private BitstreamMetadataReadPermissionEvaluatorPlugin bitstreamMetadataReadPermissionEvaluatorPlugin; + private List bitstreamProcessors = new DSpace().getServiceManager() + .getServicesByType(BitstreamSignpostingProcessor.class); + private List itemProcessors = new DSpace().getServiceManager() + .getServicesByType(ItemSignpostingProcessor.class); + private List metadataProcessors = new DSpace().getServiceManager() + .getServicesByType(MetadataSignpostingProcessor.class); @PreAuthorize("permitAll()") @RequestMapping(method = RequestMethod.GET) @@ -85,16 +100,9 @@ public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) throw new ResourceNotFoundException("No such Item: " + uuid); } verifyItemIsDiscoverable(item); - - List linksetNodes = new ArrayList<>(); - if (item.getType() == Constants.ITEM) { - List ispp = new DSpace().getServiceManager() - .getServicesByType(ItemSignpostingProcessor.class); - for (ItemSignpostingProcessor sp : ispp) { - sp.addLinkSetNodes(context, request, item, linksetNodes); - } - } - return converter.toRest(LinksetMapper.map(linksetNodes), utils.obtainProjection()); + List> linksetNodes = createLinksetNodes(request, context, item); + List linksets = linksetNodes.stream().map(LinksetMapper::map).collect(Collectors.toList()); + 
return converter.toRest(linksets, utils.obtainProjection()); } catch (SQLException e) { throw new RuntimeException(e); } @@ -111,23 +119,8 @@ public String getLset(HttpServletRequest request, @PathVariable UUID uuid) { throw new ResourceNotFoundException("No such Item: " + uuid); } verifyItemIsDiscoverable(item); - - List linksetNodes = new ArrayList<>(); - List ispp = new DSpace().getServiceManager() - .getServicesByType(ItemSignpostingProcessor.class); - for (ItemSignpostingProcessor sp : ispp) { - sp.addLinkSetNodes(context, request, item, linksetNodes); - } - - LinksetRest linksetRest = null; - for (LinksetNode linksetNode : linksetNodes) { - if (linksetRest == null) { - linksetRest = converter.toRest(linksetNode, utils.obtainProjection()); - } else { - linksetRest.getLinksetNodes().add(linksetNode); - } - } - return LinksetRestMessageConverter.convert(linksetRest); + List> linksetNodes = createLinksetNodes(request, context, item); + return LinksetRestMessageConverter.convert(linksetNodes); } catch (SQLException e) { throw new RuntimeException(e); } @@ -154,27 +147,91 @@ public List getHeader(HttpServletRequest request, @PathVariable U List linksetNodes = new ArrayList<>(); if (dso.getType() == Constants.ITEM) { verifyItemIsDiscoverable((Item) dso); - List ispp = new DSpace().getServiceManager() - .getServicesByType(ItemSignpostingProcessor.class); - for (ItemSignpostingProcessor sp : ispp) { - sp.addLinkSetNodes(context, request, (Item) dso, linksetNodes); + for (ItemSignpostingProcessor processor : itemProcessors) { + processor.addLinkSetNodes(context, request, (Item) dso, linksetNodes); } } else { - List bspp = new DSpace().getServiceManager() - .getServicesByType(BitstreamSignpostingProcessor.class); - for (BitstreamSignpostingProcessor sp : bspp) { - sp.addLinkSetNodes(context, request, (Bitstream) dso, linksetNodes); + for (BitstreamSignpostingProcessor processor : bitstreamProcessors) { + processor.addLinkSetNodes(context, request, (Bitstream) dso, linksetNodes); } } return linksetNodes.stream() - .map(node -> new TypedLinkRest(node.getLink(), node.getRelation(), node.getType())) + .map(node -> + new TypedLinkRest(node.getLink(), node.getRelation(), node.getType(), node.getAnchor())) .collect(Collectors.toList()); } catch (SQLException e) { throw new RuntimeException(e); } } + private List> createLinksetNodes( + HttpServletRequest request, + Context context, Item item + ) throws SQLException { + ArrayList> linksets = new ArrayList<>(); + addItemLinksets(request, context, item, linksets); + addBitstreamLinksets(request, context, item, linksets); + addMetadataLinksets(request, context, item, linksets); + return linksets; + } + + private void addMetadataLinksets( + HttpServletRequest request, + Context context, + Item item, + ArrayList> linksets + ) { + for (MetadataSignpostingProcessor processor : metadataProcessors) { + List metadataLinkset = new ArrayList<>(); + processor.addLinkSetNodes(context, request, item, metadataLinkset); + if (!metadataLinkset.isEmpty()) { + linksets.add(metadataLinkset); + } + } + } + + private void addBitstreamLinksets( + HttpServletRequest request, + Context context, + Item item, + ArrayList> linksets + ) throws SQLException { + Iterator bitstreamsIterator = bitstreamService.getItemBitstreams(context, item); + bitstreamsIterator.forEachRemaining(bitstream -> { + try { + boolean isAuthorized = bitstreamMetadataReadPermissionEvaluatorPlugin + .metadataReadPermissionOnBitstream(context, bitstream); + if (isAuthorized) { + List bitstreamLinkset = new 
ArrayList<>(); + for (BitstreamSignpostingProcessor processor : bitstreamProcessors) { + processor.addLinkSetNodes(context, request, bitstream, bitstreamLinkset); + } + if (!bitstreamLinkset.isEmpty()) { + linksets.add(bitstreamLinkset); + } + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + }); + } + + private void addItemLinksets( + HttpServletRequest request, + Context context, + Item item, + List> linksets + ) { + List linksetNodes = new ArrayList<>(); + if (item.getType() == Constants.ITEM) { + for (ItemSignpostingProcessor sp : itemProcessors) { + sp.addLinkSetNodes(context, request, item, linksetNodes); + } + } + linksets.add(linksetNodes); + } + private static void verifyItemIsDiscoverable(Item item) { if (!item.isDiscoverable()) { String message = format("Item with uuid [%s] is not Discoverable", item.getID().toString()); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java index e8f151f14a00..90786b9dc426 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest.signposting.converter; +import java.util.List; + import org.dspace.app.rest.converter.DSpaceConverter; import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.signposting.model.Linkset; @@ -20,18 +22,18 @@ * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) */ @Component -public class LinksetConverter implements DSpaceConverter { +public class LinksetConverter implements DSpaceConverter, LinksetRest> { @Override - public LinksetRest convert(Linkset linkset, Projection projection) { + public LinksetRest convert(List linksets, Projection projection) { LinksetRest linksetRest = new LinksetRest(); linksetRest.setProjection(projection); - linksetRest.getLinkset().add(linkset); + linksetRest.setLinkset(linksets); return linksetRest; } @Override - public Class getModelClass() { - return Linkset.class; + public Class> getModelClass() { + return (Class>) ((Class) List.class); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetNodeConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetNodeConverter.java deleted file mode 100644 index e73aa260ad3e..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetNodeConverter.java +++ /dev/null @@ -1,37 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.signposting.converter; - -import org.dspace.app.rest.converter.DSpaceConverter; -import org.dspace.app.rest.projection.Projection; -import org.dspace.app.rest.signposting.model.LinksetNode; -import org.dspace.app.rest.signposting.model.LinksetRest; -import org.springframework.stereotype.Component; - - -/** - * This is the converter from/to the Lset in the DSpace API data model and the REST data model. 
- * - * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) - */ -@Component -public class LinksetNodeConverter implements DSpaceConverter { - - @Override - public LinksetRest convert(LinksetNode linkSetNode, Projection projection) { - LinksetRest linksetRest = new LinksetRest(); - linksetRest.setProjection(projection); - linksetRest.getLinksetNodes().add(linkSetNode); - return linksetRest; - } - - @Override - public Class getModelClass() { - return LinksetNode.class; - } -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java index 03c27897b9cd..24c8e6735dc9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java @@ -11,10 +11,12 @@ import static java.util.Objects.nonNull; import static org.apache.commons.lang.StringUtils.isNotBlank; -import org.dspace.app.rest.signposting.model.LinksetRest; +import java.util.List; + +import org.dspace.app.rest.signposting.model.LinksetNode; /** - * Converter for converting LinksetRest message into application/linkset format. + * Converter for converting list of linkset nodes into application/linkset format. */ public class LinksetRestMessageConverter { @@ -22,25 +24,25 @@ private LinksetRestMessageConverter() { } /** - * Converts LinksetRest object into string of application/linkset format. + * Converts list of linkset nodes into string of application/linkset format. * - * @param linksetRest linkset rest object + * @param linksetNodes link of linkset nodes * @return string of application/linkset format. 
*/ - public static String convert(LinksetRest linksetRest) { + public static String convert(List> linksetNodes) { StringBuilder responseBody = new StringBuilder(); - linksetRest.getLinksetNodes().forEach(linksetNodes -> { - if (isNotBlank(linksetNodes.getLink())) { - responseBody.append(format("<%s> ", linksetNodes.getLink())); + linksetNodes.stream().flatMap(List::stream).forEach(linksetNode -> { + if (isNotBlank(linksetNode.getLink())) { + responseBody.append(format("<%s> ", linksetNode.getLink())); } - if (nonNull(linksetNodes.getRelation())) { - responseBody.append(format("; rel=\"%s\" ", linksetNodes.getRelation().getName())); + if (nonNull(linksetNode.getRelation())) { + responseBody.append(format("; rel=\"%s\" ", linksetNode.getRelation().getName())); } - if (isNotBlank(linksetNodes.getType())) { - responseBody.append(format("; type=\"%s\" ", linksetNodes.getType())); + if (isNotBlank(linksetNode.getType())) { + responseBody.append(format("; type=\"%s\" ", linksetNode.getType())); } - if (isNotBlank(linksetNodes.getAnchor())) { - responseBody.append(format("; anchor=\"%s\" ", linksetNodes.getAnchor())); + if (isNotBlank(linksetNode.getAnchor())) { + responseBody.append(format("; anchor=\"%s\" ", linksetNode.getAnchor())); } responseBody.append(", "); }); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java index c70e4916f69c..14d6f6581c7a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java @@ -33,6 +33,10 @@ public class Linkset { private List license; @JsonInclude(JsonInclude.Include.NON_EMPTY) private List linkset; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List describes; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + private List describedby; @JsonInclude(JsonInclude.Include.NON_NULL) private String anchor; @@ -106,6 +110,26 @@ public void setLinkset(List linkset) { this.linkset = linkset; } + public List getDescribes() { + if (describes == null) { + describes = new ArrayList<>(); + } + return describes; + } + public void setDescribes(List describes) { + this.describes = describes; + } + + public List getDescribedby() { + if (describedby == null) { + describes = new ArrayList<>(); + } + return describedby; + } + public void setDescribedby(List describedby) { + this.describedby = describedby; + } + public String getAnchor() { return anchor; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java index 7a513b5d998a..285bf5a56ee1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java @@ -20,7 +20,9 @@ public enum LinksetRelationType { TYPE("type"), LICENSE("license"), COLLECTION("collection"), - LINKSET("linkset"); + LINKSET("linkset"), + DESCRIBES("describes"), + DESCRIBED_BY("describedby"); private final String name; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java index 437e486ad097..df80cd5c2d50 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java @@ -32,8 +32,6 @@ public class LinksetRest extends RestAddressableModel { @JsonInclude(Include.NON_EMPTY) private List linkset; - @JsonInclude(Include.NON_EMPTY) - private List linkSetNode; public List getLinkset() { if (this.linkset == null) { @@ -45,16 +43,6 @@ public void setLinkset(List linkset) { this.linkset = linkset; } - public List getLinksetNodes() { - if (this.linkSetNode == null) { - this.linkSetNode = new ArrayList<>(); - } - return linkSetNode; - } - public void setLset(List linkSetNode) { - this.linkSetNode = linkSetNode; - } - @JsonIgnore @Override public String getType() { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java index 99dabd003f94..c49b32834686 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java @@ -16,6 +16,8 @@ public class MetadataConfiguration { private String pattern; + private String mimeType; + public MetadataConfiguration() { } @@ -39,4 +41,12 @@ public String getPattern() { public void setPattern(String pattern) { this.pattern = pattern; } + + public String getMimeType() { + return mimeType; + } + + public void setMimeType(String mimeType) { + this.mimeType = mimeType; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java index 3ba09bf1094c..5fbd10a3022a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java @@ -28,13 +28,16 @@ public class TypedLinkRest extends RestAddressableModel { private String type; + private String anchor; + public TypedLinkRest() { } - public TypedLinkRest(String href, LinksetRelationType rel, String type) { + public TypedLinkRest(String href, LinksetRelationType rel, String type, String anchor) { this.href = href; this.rel = rel; this.type = type; + this.anchor = anchor; } public String getHref() { @@ -57,6 +60,14 @@ public void setType(String type) { this.type = type; } + public String getAnchor() { + return anchor; + } + + public void setAnchor(String anchor) { + this.anchor = anchor; + } + @Override public String getType() { return type; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java new file mode 100644 index 000000000000..78d95cddb88d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java @@ -0,0 +1,88 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.item; + +import static java.util.Objects.isNull; +import static 
java.util.Objects.nonNull; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; +import org.dspace.app.rest.signposting.model.MetadataConfiguration; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; + +/** + * An extension of {@link ItemSignpostingProcessor} for the describedby relation. + */ +public class ItemDescribedbyProcessor extends ItemSignpostingProcessor { + + private static final Logger log = Logger.getLogger(ItemDescribedbyProcessor.class); + + private List metadataConfigurations; + + private final ItemService itemService; + + public ItemDescribedbyProcessor(FrontendUrlService frontendUrlService, ItemService itemService) { + super(frontendUrlService); + this.itemService = itemService; + setRelation(LinksetRelationType.DESCRIBED_BY); + } + + @Override + public void addLinkSetNodes(Context context, HttpServletRequest request, + Item item, List linksetNodes) { + getMetadataConfigurations() + .forEach(metadataHandle -> handleMetadata(context, item, linksetNodes, metadataHandle)); + } + + private void handleMetadata(Context context, + Item item, + List linksetNodes, + MetadataConfiguration metadataConfiguration) { + try { + List identifiers = itemService + .getMetadataByMetadataString(item, metadataConfiguration.getMetadataField()); + for (MetadataValue identifier : identifiers) { + if (nonNull(identifier)) { + String identifierValue = identifier.getValue(); + if (isNotBlank(identifierValue)) { + if (isNotBlank(metadataConfiguration.getPattern())) { + identifierValue = MessageFormat.format(metadataConfiguration.getPattern(), identifierValue); + } + LinksetNode node = new LinksetNode(identifierValue, getRelation(), + metadataConfiguration.getMimeType(), buildAnchor(context, item)); + linksetNodes.add(node); + } + } + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + + public List getMetadataConfigurations() { + if (isNull(metadataConfigurations)) { + metadataConfigurations = new ArrayList<>(); + } + return metadataConfigurations; + } + + public void setMetadataConfigurations(List metadataConfigurations) { + this.metadataConfigurations = metadataConfigurations; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java index 79a820d702d8..c5ebe958d97d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java @@ -7,21 +7,17 @@ */ package org.dspace.app.rest.signposting.processor.item; -import static java.util.Objects.isNull; import static java.util.Objects.nonNull; import static org.apache.commons.lang3.StringUtils.isNotBlank; import java.text.MessageFormat; -import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; -import org.apache.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import 
org.dspace.app.rest.signposting.model.LinksetRelationType; -import org.dspace.app.rest.signposting.model.MetadataConfiguration; import org.dspace.content.Item; -import org.dspace.content.MetadataValue; +import org.dspace.content.MetadataFieldName; import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.util.FrontendUrlService; @@ -35,13 +31,6 @@ */ public class ItemIdentifierProcessor extends ItemSignpostingProcessor { - /** - * log4j category - */ - private static final Logger log = Logger.getLogger(ItemIdentifierProcessor.class); - - private List metadataConfigurations; - private final ItemService itemService; public ItemIdentifierProcessor(FrontendUrlService frontendUrlService, ItemService itemService) { @@ -53,41 +42,13 @@ public ItemIdentifierProcessor(FrontendUrlService frontendUrlService, ItemServic @Override public void addLinkSetNodes(Context context, HttpServletRequest request, Item item, List linksetNodes) { - getMetadataConfigurations() - .forEach(metadataHandle -> handleMetadata(context, item, linksetNodes, metadataHandle)); - } - - private void handleMetadata(Context context, - Item item, - List linksetNodes, - MetadataConfiguration metadataConfiguration) { - try { - List identifiers = itemService - .getMetadataByMetadataString(item, metadataConfiguration.getMetadataField()); - for (MetadataValue identifier : identifiers) { - if (nonNull(identifier)) { - String identifierValue = identifier.getValue(); - if (isNotBlank(identifierValue)) { - if (isNotBlank(metadataConfiguration.getPattern())) { - identifierValue = MessageFormat.format(metadataConfiguration.getPattern(), identifierValue); - } - linksetNodes.add(new LinksetNode(identifierValue, getRelation(), buildAnchor(context, item))); - } - } + String identifier = itemService + .getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), Item.ANY); + if (nonNull(identifier)) { + if (isNotBlank(getPattern())) { + identifier = MessageFormat.format(getPattern(), item); } - } catch (Exception e) { - log.error(e.getMessage(), e); + linksetNodes.add(new LinksetNode(identifier, getRelation(), buildAnchor(context, item))); } } - - public List getMetadataConfigurations() { - if (isNull(metadataConfigurations)) { - metadataConfigurations = new ArrayList<>(); - } - return metadataConfigurations; - } - - public void setMetadataConfigurations(List metadataConfigurations) { - this.metadataConfigurations = metadataConfigurations; - } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java new file mode 100644 index 000000000000..8e3751bc2059 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java @@ -0,0 +1,53 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.metadata; + +import static org.dspace.content.Item.ANY; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang.StringUtils; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.model.LinksetRelationType; 
+import org.dspace.app.rest.signposting.model.MetadataConfiguration; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.util.FrontendUrlService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * An extension of {@link MetadataSignpostingProcessor} for the 'describes' relation. + */ +public class MetadataDescribesSignpostingProcessor extends MetadataSignpostingProcessor { + + @Autowired + private FrontendUrlService frontendUrlService; + + public MetadataDescribesSignpostingProcessor(ItemService itemService) { + super(itemService); + setRelation(LinksetRelationType.DESCRIBES); + } + + @Override + public void addLinkSetNodes( + Context context, + HttpServletRequest request, + Item item, + List linksetNodes + ) { + String metadataValue = itemService.getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), ANY); + if (StringUtils.isNotBlank(metadataValue)) { + String itemUrl = frontendUrlService.generateUrl(context, item); + String anchor = buildAnchor(new MetadataConfiguration(getMetadataField(), getPattern()), item); + linksetNodes.add(new LinksetNode(itemUrl, getRelation(), "text/html", anchor)); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java new file mode 100644 index 000000000000..27f06960e753 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.processor.metadata; + +import static org.apache.commons.lang.StringUtils.isNotBlank; +import static org.dspace.content.Item.ANY; + +import java.text.MessageFormat; + +import org.apache.logging.log4j.util.Strings; +import org.dspace.app.rest.signposting.model.MetadataConfiguration; +import org.dspace.app.rest.signposting.processor.AbstractSignPostingProcessor; +import org.dspace.app.rest.signposting.processor.SignPostingProcessor; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.service.ItemService; + +/** + * An abstract class represents {@link SignPostingProcessor } for a metadata. + */ +public abstract class MetadataSignpostingProcessor extends AbstractSignPostingProcessor + implements SignPostingProcessor { + + protected final ItemService itemService; + + public MetadataSignpostingProcessor(ItemService itemService) { + this.itemService = itemService; + } + + public String buildAnchor(MetadataConfiguration metadataConfiguration, Item item) { + String metadataValue = itemService + .getMetadataFirstValue(item, new MetadataFieldName(metadataConfiguration.getMetadataField()), ANY); + if (isNotBlank(metadataValue)) { + return isNotBlank(metadataConfiguration.getPattern()) + ? 
MessageFormat.format(metadataConfiguration.getPattern(), metadataValue) + : metadataValue; + } + return Strings.EMPTY; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java index 31860143b997..5da05bc44059 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java @@ -26,7 +26,7 @@ private LinksetMapper() { /** * Converts list of linkset nodes into linkset. * - * @param linksetNodes + * @param linksetNodes list of linkset nodes * @return linkset */ public static Linkset map(List linksetNodes) { @@ -38,6 +38,8 @@ public static Linkset map(List linksetNodes) { linkset.setCollection(getLinksetRelationsByType(linksetNodes, LinksetRelationType.COLLECTION)); linkset.setLicense(getLinksetRelationsByType(linksetNodes, LinksetRelationType.LICENSE)); linkset.setCiteAs(getLinksetRelationsByType(linksetNodes, LinksetRelationType.CITE_AS)); + linkset.setDescribes(getLinksetRelationsByType(linksetNodes, LinksetRelationType.DESCRIBES)); + linkset.setDescribedby(getLinksetRelationsByType(linksetNodes, LinksetRelationType.DESCRIBED_BY)); if (!linksetNodes.isEmpty()) { linkset.setAnchor(linksetNodes.get(0).getAnchor()); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index 7b3b6543ef3e..ed3e63dde570 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -115,9 +115,17 @@ public void findOneItemJsonLinksets() throws Exception { getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", - Matchers.hasSize(1))) + Matchers.hasSize(3))) .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString("application/vnd.datacite.datacite+json"))) + .andExpect(jsonPath("$.linkset[0].describedby[1].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[1].type", + Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[0].linkset[0].href", Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) .andExpect(jsonPath("$.linkset[0].linkset[0].type", @@ -127,6 +135,18 @@ public void findOneItemJsonLinksets() throws Exception { "/json"))) .andExpect(jsonPath("$.linkset[0].linkset[1].type", Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString("http://localhost:4000/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[2].describes[0].href", + 
Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[2].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[2].anchor", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) ; } @@ -146,9 +166,17 @@ public void findOneItemJsonLinksetsWithType() throws Exception { getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", - Matchers.hasSize(1))) + Matchers.hasSize(3))) .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString("application/vnd.datacite.datacite+json"))) + .andExpect(jsonPath("$.linkset[0].describedby[1].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[1].type", + Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[0].type", Matchers.hasSize(2))) .andExpect(jsonPath("$.linkset[0].type[0].href", @@ -163,7 +191,19 @@ public void findOneItemJsonLinksetsWithType() throws Exception { Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + "/json"))) .andExpect(jsonPath("$.linkset[0].linkset[1].type", - Matchers.hasToString("application/linkset+json"))); + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString("http://localhost:4000/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[2].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[2].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[2].anchor", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); } @Test @@ -181,7 +221,7 @@ public void findOneItemJsonLinksetsWithLicence() throws Exception { getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", - Matchers.hasSize(1))) + Matchers.hasSize(2))) .andExpect(jsonPath("$.linkset[0].type[0].href", Matchers.hasToString("https://schema.org/AboutPage"))) .andExpect(jsonPath("$.linkset[0].license[0].href", @@ -194,7 +234,13 @@ public void findOneItemJsonLinksetsWithLicence() throws Exception { Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + "/json"))) .andExpect(jsonPath("$.linkset[0].linkset[1].type", - Matchers.hasToString("application/linkset+json"))); + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString("http://localhost:4000/handle/" + item.getHandle()))); } @Test @@ -232,9 +278,17 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { getClient().perform(get("/signposting/linksets/" + item.getID() 
+ "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", - Matchers.hasSize(1))) + Matchers.hasSize(5))) .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString("application/vnd.datacite.datacite+json"))) + .andExpect(jsonPath("$.linkset[0].describedby[1].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[1].type", + Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[0].item[0].href", Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) .andExpect(jsonPath("$.linkset[0].item[0].type", @@ -253,7 +307,49 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + "/json"))) .andExpect(jsonPath("$.linkset[0].linkset[1].type", - Matchers.hasToString("application/linkset+json"))); + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].collection[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].collection[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[1].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[2].collection[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[2].collection[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[2].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[2].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[2].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[2].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[2].anchor", + Matchers.hasToString(url + "/bitstreams/" + bitstream2.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[3].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[3].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[3].anchor", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[4].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[4].describes[0].type", + Matchers.hasToString("text/html"))) + 
.andExpect(jsonPath("$.linkset[4].anchor", + Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); } @Test @@ -374,7 +470,6 @@ public void findOneItemLsetLinksets() throws Exception { context.turnOffAuthorisationSystem(); Item item = ItemBuilder.createItem(context, collection) .withTitle("Item Test") - .withMetadata("dc", "identifier", "doi", doi) .build(); Bitstream bitstream1 = null; try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { @@ -388,7 +483,7 @@ public void findOneItemLsetLinksets() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); - String siteAsRelation = "<" + MessageFormat.format(doiPattern, doi) + "> ; rel=\"cite-as\" ; anchor=\"" + + String siteAsRelation = "<" + url + "/handle/" + item.getHandle() + "> ; rel=\"cite-as\" ; anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; String itemRelation = "<" + url + "/bitstreams/" + bitstream1.getID() + "/download> ; rel=\"item\" ; " + "type=\"text/plain\" ; anchor=\"" + url + "/entities/publication/" + @@ -401,13 +496,34 @@ public void findOneItemLsetLinksets() throws Exception { String jsonLinksetRelation = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + "/json> ; rel=\"linkset\" ; type=\"application/linkset+json\" ;" + " anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; + String describedByRelation = "<" + url + "/handle/" + item.getHandle() + "> ; rel=\"describedby\" ;" + + " type=\"text/html\" ; anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; + + String bitstreamCollectionLink = "<" + url + "/entities/publication/" + item.getID() + "> ;" + + " rel=\"collection\" ; type=\"text/html\" ; anchor=\"" + url + "/bitstreams/" + + bitstream1.getID() + "/download\""; + String bitstreamLinksetLink = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID() + "> ; " + + "rel=\"linkset\" ; type=\"application/linkset\" ; " + + "anchor=\"" + url + "/bitstreams/" + bitstream1.getID() + "/download\""; + String bitstreamLinksetJsonLink = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID() + "/json> ; " + + "rel=\"linkset\" ; type=\"application/linkset+json\" ; " + + "anchor=\"" + url + "/bitstreams/" + bitstream1.getID() + "/download\""; + + String describesMetadataLink = "<" + url + "/entities/publication/" + item.getID() + "> ; " + + "rel=\"describes\" ; type=\"text/html\" ; " + + "anchor=\"" + url + "/handle/" + item.getHandle() + "\""; getClient().perform(get("/signposting/linksets/" + item.getID())) .andExpect(content().string(Matchers.containsString(siteAsRelation))) .andExpect(content().string(Matchers.containsString(itemRelation))) .andExpect(content().string(Matchers.containsString(typeRelation))) .andExpect(content().string(Matchers.containsString(linksetRelation))) - .andExpect(content().string(Matchers.containsString(jsonLinksetRelation))); + .andExpect(content().string(Matchers.containsString(jsonLinksetRelation))) + .andExpect(content().string(Matchers.containsString(describedByRelation))) + .andExpect(content().string(Matchers.containsString(bitstreamCollectionLink))) + .andExpect(content().string(Matchers.containsString(bitstreamLinksetLink))) + .andExpect(content().string(Matchers.containsString(bitstreamLinksetJsonLink))) + .andExpect(content().string(Matchers.containsString(describesMetadataLink))); } @Test @@ -472,31 +588,42 @@ public void findTypedLinkForItemWithAuthor() 
throws Exception { String signpostingUrl = configurationService.getProperty("signposting.path"); String dcIdentifierUriMetadataValue = itemService .getMetadataFirstValue(publication, "dc", "identifier", "uri", Item.ANY); + String anchor = url + "/entities/publication/" + publication.getID(); getClient().perform(get("/signposting/links/" + publication.getID()) .header("Accept", "application/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$", - Matchers.hasSize(7))) + Matchers.hasSize(8))) .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(orcidPattern, orcidValue) + "' " + - "&& @.rel == 'author')]").exists()) + "&& @.rel == 'author' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(doiPattern, doi) + "' " + - "&& @.rel == 'cite-as')]").exists()) + "&& @.rel == 'describedby' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + dcIdentifierUriMetadataValue + "' " + - "&& @.rel == 'cite-as')]").exists()) + "&& @.rel == 'describedby' " + + "&& @.anchor == '" + anchor + "')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + dcIdentifierUriMetadataValue + "' " + + "&& @.rel == 'cite-as' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + url + "/bitstreams/" + bitstream.getID() + "/download' " + "&& @.rel == 'item' " + - "&& @.type == 'text/plain')]").exists()) + "&& @.type == 'text/plain' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == 'https://schema.org/AboutPage' " + - "&& @.rel == 'type')]").exists()) + "&& @.rel == 'type' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + publication.getID().toString() + "' " + "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset')]").exists()) + "&& @.type == 'application/linkset' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + publication.getID().toString() + "/json' " + "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset+json')]").exists()); + "&& @.type == 'application/linkset+json' " + + "&& @.anchor == '" + anchor + "')]").exists()); } @Test @@ -521,6 +648,7 @@ public void findTypedLinkForBitstream() throws Exception { context.restoreAuthSystemState(); String uiUrl = configurationService.getProperty("dspace.ui.url"); + String anchor = uiUrl + "/bitstreams/" + bitstream.getID() + "/download"; getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) .andExpect(status().isOk()) @@ -528,13 +656,16 @@ public void findTypedLinkForBitstream() throws Exception { Matchers.hasSize(3))) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + "&& @.rel == 'collection' " + - "&& @.type == 'text/html')]").exists()) + "&& @.type == 'text/html' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset')]").exists()) + "&& @.type == 'application/linkset' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + "' && @.rel == 'linkset' " + - "&& @.type == 'application/linkset+json')]").exists()); + "&& @.type == 
'application/linkset+json' " + + "&& @.anchor == '" + anchor + "')]").exists()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); @@ -565,6 +696,7 @@ public void findTypedLinkForBitstreamWithType() throws Exception { context.restoreAuthSystemState(); String uiUrl = configurationService.getProperty("dspace.ui.url"); + String anchor = uiUrl + "/bitstreams/" + bitstream.getID() + "/download"; getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) .andExpect(status().isOk()) @@ -572,15 +704,19 @@ public void findTypedLinkForBitstreamWithType() throws Exception { Matchers.hasSize(4))) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + "&& @.rel == 'collection' " + - "&& @.type == 'text/html')]").exists()) + "&& @.type == 'text/html' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset')]").exists()) + "&& @.type == 'application/linkset' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + "' && @.rel == 'linkset' " + - "&& @.type == 'application/linkset+json')]").exists()) + "&& @.type == 'application/linkset+json' " + + "&& @.anchor == '" + anchor + "')]").exists()) .andExpect(jsonPath("$[?(@.href == 'https://schema.org/ScholarlyArticle' " + - "&& @.rel == 'type')]").exists()); + "&& @.rel == 'type' " + + "&& @.anchor == '" + anchor + "')]").exists()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); @@ -612,7 +748,6 @@ public void findTypedLinkForRestrictedBitstream() throws Exception { } context.restoreAuthSystemState(); - String uiUrl = configurationService.getProperty("dspace.ui.url"); getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) .andExpect(status().isUnauthorized()); @@ -645,7 +780,6 @@ public void findTypedLinkForBitstreamUnderEmbargo() throws Exception { } context.restoreAuthSystemState(); - String uiUrl = configurationService.getProperty("dspace.ui.url"); getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) .andExpect(status().isUnauthorized()); @@ -677,7 +811,6 @@ public void findTypedLinkForBitstreamOfWorkspaceItem() throws Exception { } context.restoreAuthSystemState(); - String uiUrl = configurationService.getProperty("dspace.ui.url"); getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) .andExpect(status().isUnauthorized()); diff --git a/dspace/config/spring/rest/signposting.xml b/dspace/config/spring/rest/signposting.xml index ba34fc6ce212..51060590d280 100644 --- a/dspace/config/spring/rest/signposting.xml +++ b/dspace/config/spring/rest/signposting.xml @@ -19,15 +19,22 @@ - + - + + + + + + + @@ -52,4 +59,14 @@ + + + + + + + + From 55bfd4b2e54460f968a23875cb669dc12938ec8a Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Thu, 22 Jun 2023 13:50:20 +0200 Subject: [PATCH 309/686] [DURACOM-156] Singposting: feedback after initial deployment --- .../controller/LinksetRestController.java | 166 +++++-------- .../rest/signposting/model/TypedLinkRest.java | 13 +- .../item/ItemDescribedbyProcessor.java | 58 +---- 
...MetadataDescribesSignpostingProcessor.java | 18 +- .../MetadataSignpostingProcessor.java | 31 +-- .../signposting/service/LinksetService.java | 50 ++++ .../service/impl/LinksetServiceImpl.java | 151 ++++++++++++ .../controller/LinksetRestControllerIT.java | 232 +++++++++++------- dspace/config/modules/signposting.cfg | 8 +- dspace/config/spring/rest/signposting.xml | 30 +-- 10 files changed, 441 insertions(+), 316 deletions(-) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/LinksetService.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java index eecdefe84253..98642bb703ed 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -8,38 +8,42 @@ package org.dspace.app.rest.signposting.controller; import static java.lang.String.format; +import static java.util.Objects.isNull; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID; +import java.io.IOException; import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Iterator; import java.util.List; import java.util.UUID; import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; -import org.apache.log4j.Logger; import org.dspace.app.rest.converter.ConverterService; -import org.dspace.app.rest.security.BitstreamMetadataReadPermissionEvaluatorPlugin; import org.dspace.app.rest.signposting.converter.LinksetRestMessageConverter; import org.dspace.app.rest.signposting.model.Linkset; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRest; import org.dspace.app.rest.signposting.model.TypedLinkRest; -import org.dspace.app.rest.signposting.processor.bitstream.BitstreamSignpostingProcessor; -import org.dspace.app.rest.signposting.processor.item.ItemSignpostingProcessor; -import org.dspace.app.rest.signposting.processor.metadata.MetadataSignpostingProcessor; +import org.dspace.app.rest.signposting.service.LinksetService; import org.dspace.app.rest.signposting.utils.LinksetMapper; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; +import org.dspace.content.crosswalk.CrosswalkException; +import org.dspace.content.crosswalk.DisseminationCrosswalk; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.ItemService; -import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.utils.DSpace; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.core.service.PluginService; +import org.dspace.services.ConfigurationService; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import 
org.springframework.data.rest.webmvc.ResourceNotFoundException; @@ -63,8 +67,6 @@ @ConditionalOnProperty("signposting.enabled") public class LinksetRestController { - private static final Logger log = Logger.getLogger(LinksetRestController.class); - @Autowired private Utils utils; @Autowired @@ -74,13 +76,10 @@ public class LinksetRestController { @Autowired private ConverterService converter; @Autowired - private BitstreamMetadataReadPermissionEvaluatorPlugin bitstreamMetadataReadPermissionEvaluatorPlugin; - private List bitstreamProcessors = new DSpace().getServiceManager() - .getServicesByType(BitstreamSignpostingProcessor.class); - private List itemProcessors = new DSpace().getServiceManager() - .getServicesByType(ItemSignpostingProcessor.class); - private List metadataProcessors = new DSpace().getServiceManager() - .getServicesByType(MetadataSignpostingProcessor.class); + private LinksetService linksetService; + @Autowired + private ConfigurationService configurationService; + private final PluginService pluginService = CoreServiceFactory.getInstance().getPluginService(); @PreAuthorize("permitAll()") @RequestMapping(method = RequestMethod.GET) @@ -100,7 +99,8 @@ public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) throw new ResourceNotFoundException("No such Item: " + uuid); } verifyItemIsDiscoverable(item); - List> linksetNodes = createLinksetNodes(request, context, item); + List> linksetNodes = linksetService + .createLinksetNodesForMultipleLinksets(request, context, item); List linksets = linksetNodes.stream().map(LinksetMapper::map).collect(Collectors.toList()); return converter.toRest(linksets, utils.obtainProjection()); } catch (SQLException e) { @@ -113,13 +113,13 @@ public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) public String getLset(HttpServletRequest request, @PathVariable UUID uuid) { try { Context context = ContextUtil.obtainContext(request); - Item item = itemService.find(context, uuid); if (item == null) { throw new ResourceNotFoundException("No such Item: " + uuid); } verifyItemIsDiscoverable(item); - List> linksetNodes = createLinksetNodes(request, context, item); + List> linksetNodes = linksetService + .createLinksetNodesForMultipleLinksets(request, context, item); return LinksetRestMessageConverter.convert(linksetNodes); } catch (SQLException e) { throw new RuntimeException(e); @@ -133,103 +133,49 @@ public String getLset(HttpServletRequest request, @PathVariable UUID uuid) { @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ') && hasPermission(#uuid, 'BITSTREAM', 'READ')") @RequestMapping(value = "/links" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) public List getHeader(HttpServletRequest request, @PathVariable UUID uuid) { - try { - Context context = ContextUtil.obtainContext(request); - - DSpaceObject dso = bitstreamService.find(context, uuid); - if (dso == null) { - dso = itemService.find(context, uuid); - if (dso == null) { - throw new ResourceNotFoundException("No such resource: " + uuid); - } - } - - List linksetNodes = new ArrayList<>(); - if (dso.getType() == Constants.ITEM) { - verifyItemIsDiscoverable((Item) dso); - for (ItemSignpostingProcessor processor : itemProcessors) { - processor.addLinkSetNodes(context, request, (Item) dso, linksetNodes); - } - } else { - for (BitstreamSignpostingProcessor processor : bitstreamProcessors) { - processor.addLinkSetNodes(context, request, (Bitstream) dso, linksetNodes); - } - } - - return linksetNodes.stream() - .map(node -> - new 
TypedLinkRest(node.getLink(), node.getRelation(), node.getType(), node.getAnchor())) - .collect(Collectors.toList()); - } catch (SQLException e) { - throw new RuntimeException(e); - } + Context context = ContextUtil.obtainContext(request); + DSpaceObject dso = findObject(context, uuid); + List linksetNodes = linksetService.createLinksetNodesForSingleLinkset(request, context, dso); + return linksetNodes.stream() + .map(node -> new TypedLinkRest(node.getLink(), node.getRelation(), node.getType())) + .collect(Collectors.toList()); } - private List> createLinksetNodes( + @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") + @RequestMapping(value = "/describedby" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = RequestMethod.GET) + public String getDescribedBy( HttpServletRequest request, - Context context, Item item - ) throws SQLException { - ArrayList> linksets = new ArrayList<>(); - addItemLinksets(request, context, item, linksets); - addBitstreamLinksets(request, context, item, linksets); - addMetadataLinksets(request, context, item, linksets); - return linksets; + HttpServletResponse response, + @PathVariable UUID uuid + ) throws SQLException, AuthorizeException, IOException, CrosswalkException { + Context context = ContextUtil.obtainContext(request); + String xwalkName = configurationService.getProperty("signposting.describedby.crosswalk-name"); + String responseMimeType = configurationService.getProperty("signposting.describedby.mime-type"); + response.addHeader("Content-Type", responseMimeType); + + DSpaceObject object = findObject(context, uuid); + DisseminationCrosswalk xwalk = (DisseminationCrosswalk) + pluginService.getNamedPlugin(DisseminationCrosswalk.class, xwalkName); + List elements = xwalk.disseminateList(context, object); + XMLOutputter outputter = new XMLOutputter(Format.getCompactFormat()); + return outputter.outputString(elements); } - private void addMetadataLinksets( - HttpServletRequest request, - Context context, - Item item, - ArrayList> linksets - ) { - for (MetadataSignpostingProcessor processor : metadataProcessors) { - List metadataLinkset = new ArrayList<>(); - processor.addLinkSetNodes(context, request, item, metadataLinkset); - if (!metadataLinkset.isEmpty()) { - linksets.add(metadataLinkset); - } - } - } - - private void addBitstreamLinksets( - HttpServletRequest request, - Context context, - Item item, - ArrayList> linksets - ) throws SQLException { - Iterator bitstreamsIterator = bitstreamService.getItemBitstreams(context, item); - bitstreamsIterator.forEachRemaining(bitstream -> { - try { - boolean isAuthorized = bitstreamMetadataReadPermissionEvaluatorPlugin - .metadataReadPermissionOnBitstream(context, bitstream); - if (isAuthorized) { - List bitstreamLinkset = new ArrayList<>(); - for (BitstreamSignpostingProcessor processor : bitstreamProcessors) { - processor.addLinkSetNodes(context, request, bitstream, bitstreamLinkset); - } - if (!bitstreamLinkset.isEmpty()) { - linksets.add(bitstreamLinkset); - } + private DSpaceObject findObject(Context context, UUID uuid) { + try { + DSpaceObject object = itemService.find(context, uuid); + if (isNull(object)) { + object = bitstreamService.find(context, uuid); + if (isNull(object)) { + throw new ResourceNotFoundException("No such resource: " + uuid); } - } catch (SQLException e) { - log.error(e.getMessage(), e); - } - }); - } - - private void addItemLinksets( - HttpServletRequest request, - Context context, - Item item, - List> linksets - ) { - List linksetNodes = new ArrayList<>(); - if (item.getType() == 
Constants.ITEM) { - for (ItemSignpostingProcessor sp : itemProcessors) { - sp.addLinkSetNodes(context, request, item, linksetNodes); + } else { + verifyItemIsDiscoverable((Item) object); } + return object; + } catch (SQLException e) { + throw new RuntimeException(e); } - linksets.add(linksetNodes); } private static void verifyItemIsDiscoverable(Item item) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java index 5fbd10a3022a..3ba09bf1094c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java @@ -28,16 +28,13 @@ public class TypedLinkRest extends RestAddressableModel { private String type; - private String anchor; - public TypedLinkRest() { } - public TypedLinkRest(String href, LinksetRelationType rel, String type, String anchor) { + public TypedLinkRest(String href, LinksetRelationType rel, String type) { this.href = href; this.rel = rel; this.type = type; - this.anchor = anchor; } public String getHref() { @@ -60,14 +57,6 @@ public void setType(String type) { this.type = type; } - public String getAnchor() { - return anchor; - } - - public void setAnchor(String anchor) { - this.anchor = anchor; - } - @Override public String getType() { return type; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java index 78d95cddb88d..a16770c4d103 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java @@ -7,23 +7,15 @@ */ package org.dspace.app.rest.signposting.processor.item; -import static java.util.Objects.isNull; -import static java.util.Objects.nonNull; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.text.MessageFormat; -import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; import org.apache.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; -import org.dspace.app.rest.signposting.model.MetadataConfiguration; import org.dspace.content.Item; -import org.dspace.content.MetadataValue; -import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; import org.dspace.util.FrontendUrlService; /** @@ -33,56 +25,26 @@ public class ItemDescribedbyProcessor extends ItemSignpostingProcessor { private static final Logger log = Logger.getLogger(ItemDescribedbyProcessor.class); - private List metadataConfigurations; - - private final ItemService itemService; + private final ConfigurationService configurationService; - public ItemDescribedbyProcessor(FrontendUrlService frontendUrlService, ItemService itemService) { + public ItemDescribedbyProcessor(FrontendUrlService frontendUrlService, ConfigurationService configurationService) { super(frontendUrlService); - this.itemService = itemService; + this.configurationService = configurationService; setRelation(LinksetRelationType.DESCRIBED_BY); } @Override public void 
addLinkSetNodes(Context context, HttpServletRequest request, Item item, List linksetNodes) { - getMetadataConfigurations() - .forEach(metadataHandle -> handleMetadata(context, item, linksetNodes, metadataHandle)); - } - - private void handleMetadata(Context context, - Item item, - List linksetNodes, - MetadataConfiguration metadataConfiguration) { try { - List identifiers = itemService - .getMetadataByMetadataString(item, metadataConfiguration.getMetadataField()); - for (MetadataValue identifier : identifiers) { - if (nonNull(identifier)) { - String identifierValue = identifier.getValue(); - if (isNotBlank(identifierValue)) { - if (isNotBlank(metadataConfiguration.getPattern())) { - identifierValue = MessageFormat.format(metadataConfiguration.getPattern(), identifierValue); - } - LinksetNode node = new LinksetNode(identifierValue, getRelation(), - metadataConfiguration.getMimeType(), buildAnchor(context, item)); - linksetNodes.add(node); - } - } - } + String signpostingPath = configurationService.getProperty("signposting.path"); + String baseUrl = configurationService.getProperty("dspace.ui.url"); + String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); + String describedByUrl = baseUrl + "/" + signpostingPath + "/describedby/" + item.getID(); + LinksetNode node = new LinksetNode(describedByUrl, getRelation(), mimeType, buildAnchor(context, item)); + linksetNodes.add(node); } catch (Exception e) { log.error(e.getMessage(), e); } } - - public List getMetadataConfigurations() { - if (isNull(metadataConfigurations)) { - metadataConfigurations = new ArrayList<>(); - } - return metadataConfigurations; - } - - public void setMetadataConfigurations(List metadataConfigurations) { - this.metadataConfigurations = metadataConfigurations; - } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java index 8e3751bc2059..baae16b88389 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java @@ -7,18 +7,12 @@ */ package org.dspace.app.rest.signposting.processor.metadata; -import static org.dspace.content.Item.ANY; - import java.util.List; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.lang.StringUtils; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; -import org.dspace.app.rest.signposting.model.MetadataConfiguration; import org.dspace.content.Item; -import org.dspace.content.MetadataFieldName; -import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.util.FrontendUrlService; import org.springframework.beans.factory.annotation.Autowired; @@ -31,8 +25,7 @@ public class MetadataDescribesSignpostingProcessor extends MetadataSignpostingPr @Autowired private FrontendUrlService frontendUrlService; - public MetadataDescribesSignpostingProcessor(ItemService itemService) { - super(itemService); + public MetadataDescribesSignpostingProcessor() { setRelation(LinksetRelationType.DESCRIBES); } @@ -43,11 +36,8 @@ public void addLinkSetNodes( Item item, List linksetNodes ) { - String metadataValue = 
itemService.getMetadataFirstValue(item, new MetadataFieldName(getMetadataField()), ANY); - if (StringUtils.isNotBlank(metadataValue)) { - String itemUrl = frontendUrlService.generateUrl(context, item); - String anchor = buildAnchor(new MetadataConfiguration(getMetadataField(), getPattern()), item); - linksetNodes.add(new LinksetNode(itemUrl, getRelation(), "text/html", anchor)); - } + String itemUrl = frontendUrlService.generateUrl(context, item); + String anchor = buildAnchor(item); + linksetNodes.add(new LinksetNode(itemUrl, getRelation(), "text/html", anchor)); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java index 27f06960e753..7b4e9135f1a8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java @@ -7,18 +7,11 @@ */ package org.dspace.app.rest.signposting.processor.metadata; -import static org.apache.commons.lang.StringUtils.isNotBlank; -import static org.dspace.content.Item.ANY; - -import java.text.MessageFormat; - -import org.apache.logging.log4j.util.Strings; -import org.dspace.app.rest.signposting.model.MetadataConfiguration; import org.dspace.app.rest.signposting.processor.AbstractSignPostingProcessor; import org.dspace.app.rest.signposting.processor.SignPostingProcessor; import org.dspace.content.Item; -import org.dspace.content.MetadataFieldName; -import org.dspace.content.service.ItemService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; /** * An abstract class represents {@link SignPostingProcessor } for a metadata. @@ -26,20 +19,12 @@ public abstract class MetadataSignpostingProcessor extends AbstractSignPostingProcessor implements SignPostingProcessor { - protected final ItemService itemService; - - public MetadataSignpostingProcessor(ItemService itemService) { - this.itemService = itemService; - } + private final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); - public String buildAnchor(MetadataConfiguration metadataConfiguration, Item item) { - String metadataValue = itemService - .getMetadataFirstValue(item, new MetadataFieldName(metadataConfiguration.getMetadataField()), ANY); - if (isNotBlank(metadataValue)) { - return isNotBlank(metadataConfiguration.getPattern()) - ? 
MessageFormat.format(metadataConfiguration.getPattern(), metadataValue) - : metadataValue; - } - return Strings.EMPTY; + public String buildAnchor(Item item) { + String baseUrl = configurationService.getProperty("dspace.ui.url"); + String signpostingPath = configurationService.getProperty("signposting.path"); + return baseUrl + "/" + signpostingPath + "/describedby/" + item.getID(); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/LinksetService.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/LinksetService.java new file mode 100644 index 000000000000..33d0c10b7415 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/LinksetService.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.service; + +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * Service for work with linksets. + */ +public interface LinksetService { + + /** + * Returns list of linkset nodes for multiple linksets. + * + * @param request request + * @param context context + * @param item item + * @return two-dimensional list representing a list of lists where each list represents the linkset nodes. + */ + List> createLinksetNodesForMultipleLinksets( + HttpServletRequest request, + Context context, + Item item + ); + + /** + * Returns list of linkset nodes for single linkset. + * + * @param request request + * @param context context + * @param object dspace object + * @return two-dimensional list representing a list of lists where each list represents the linkset nodes. 
+ */ + List createLinksetNodesForSingleLinkset( + HttpServletRequest request, + Context context, + DSpaceObject object + ); +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java new file mode 100644 index 000000000000..e7fa25de93bb --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java @@ -0,0 +1,151 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.service.impl; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.security.BitstreamMetadataReadPermissionEvaluatorPlugin; +import org.dspace.app.rest.signposting.model.LinksetNode; +import org.dspace.app.rest.signposting.processor.bitstream.BitstreamSignpostingProcessor; +import org.dspace.app.rest.signposting.processor.item.ItemSignpostingProcessor; +import org.dspace.app.rest.signposting.processor.metadata.MetadataSignpostingProcessor; +import org.dspace.app.rest.signposting.service.LinksetService; +import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.utils.DSpace; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * Default implementation of {@link LinksetService}. 
+ */ +@Service +public class LinksetServiceImpl implements LinksetService { + + private static final Logger log = Logger.getLogger(LinksetServiceImpl.class); + + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private BitstreamMetadataReadPermissionEvaluatorPlugin bitstreamMetadataReadPermissionEvaluatorPlugin; + + private final List bitstreamProcessors = new DSpace().getServiceManager() + .getServicesByType(BitstreamSignpostingProcessor.class); + + private final List itemProcessors = new DSpace().getServiceManager() + .getServicesByType(ItemSignpostingProcessor.class); + + private final List metadataProcessors = new DSpace().getServiceManager() + .getServicesByType(MetadataSignpostingProcessor.class); + + @Override + public List> createLinksetNodesForMultipleLinksets( + HttpServletRequest request, + Context context, + Item item + ) { + ArrayList> linksets = new ArrayList<>(); + addItemLinksets(request, context, item, linksets); + addBitstreamLinksets(request, context, item, linksets); + addMetadataLinksets(request, context, item, linksets); + return linksets; + } + + @Override + public List createLinksetNodesForSingleLinkset( + HttpServletRequest request, + Context context, + DSpaceObject object + ) { + List linksetNodes = new ArrayList<>(); + if (object.getType() == Constants.ITEM) { + for (ItemSignpostingProcessor processor : itemProcessors) { + processor.addLinkSetNodes(context, request, (Item) object, linksetNodes); + } + } else if (object.getType() == Constants.BITSTREAM) { + for (BitstreamSignpostingProcessor processor : bitstreamProcessors) { + processor.addLinkSetNodes(context, request, (Bitstream) object, linksetNodes); + } + } + return linksetNodes; + } + + private void addItemLinksets( + HttpServletRequest request, + Context context, + Item item, + List> linksets + ) { + List linksetNodes = new ArrayList<>(); + if (item.getType() == Constants.ITEM) { + for (ItemSignpostingProcessor sp : itemProcessors) { + sp.addLinkSetNodes(context, request, item, linksetNodes); + } + } + linksets.add(linksetNodes); + } + + private void addBitstreamLinksets( + HttpServletRequest request, + Context context, + Item item, + ArrayList> linksets + ) { + Iterator bitstreamsIterator = getItemBitstreams(context, item); + bitstreamsIterator.forEachRemaining(bitstream -> { + try { + boolean isAuthorized = bitstreamMetadataReadPermissionEvaluatorPlugin + .metadataReadPermissionOnBitstream(context, bitstream); + if (isAuthorized) { + List bitstreamLinkset = new ArrayList<>(); + for (BitstreamSignpostingProcessor processor : bitstreamProcessors) { + processor.addLinkSetNodes(context, request, bitstream, bitstreamLinkset); + } + if (!bitstreamLinkset.isEmpty()) { + linksets.add(bitstreamLinkset); + } + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + }); + } + + private void addMetadataLinksets( + HttpServletRequest request, + Context context, + Item item, + ArrayList> linksets + ) { + for (MetadataSignpostingProcessor processor : metadataProcessors) { + List metadataLinkset = new ArrayList<>(); + processor.addLinkSetNodes(context, request, item, metadataLinkset); + if (!metadataLinkset.isEmpty()) { + linksets.add(metadataLinkset); + } + } + } + + private Iterator getItemBitstreams(Context context, Item item) { + try { + return bitstreamService.getItemBitstreams(context, item); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } +} diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index ed3e63dde570..d348a7d3cd51 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -14,7 +14,10 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.InputStream; +import java.text.DateFormat; import java.text.MessageFormat; +import java.text.SimpleDateFormat; +import java.util.Date; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; @@ -49,6 +52,7 @@ import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.web.servlet.result.MockMvcResultMatchers; public class LinksetRestControllerIT extends AbstractControllerIntegrationTest { @@ -105,6 +109,7 @@ public void findAllItemsLinksets() throws Exception { public void findOneItemJsonLinksets() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); context.turnOffAuthorisationSystem(); Item item = ItemBuilder.createItem(context, collection) .withTitle("Item Test") @@ -115,17 +120,13 @@ public void findOneItemJsonLinksets() throws Exception { getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", - Matchers.hasSize(3))) + Matchers.hasSize(2))) .andExpect(jsonPath("$.linkset[0].cite-as[0].href", Matchers.hasToString(url + "/handle/" + item.getHandle()))) .andExpect(jsonPath("$.linkset[0].describedby[0].href", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) .andExpect(jsonPath("$.linkset[0].describedby[0].type", - Matchers.hasToString("application/vnd.datacite.datacite+json"))) - .andExpect(jsonPath("$.linkset[0].describedby[1].href", - Matchers.hasToString(url + "/handle/" + item.getHandle()))) - .andExpect(jsonPath("$.linkset[0].describedby[1].type", - Matchers.hasToString("text/html"))) + Matchers.hasToString(mimeType))) .andExpect(jsonPath("$.linkset[0].linkset[0].href", Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) .andExpect(jsonPath("$.linkset[0].linkset[0].type", @@ -140,20 +141,14 @@ public void findOneItemJsonLinksets() throws Exception { .andExpect(jsonPath("$.linkset[1].describes[0].type", Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[1].anchor", - Matchers.hasToString("http://localhost:4000/handle/" + item.getHandle()))) - .andExpect(jsonPath("$.linkset[2].describes[0].href", - Matchers.hasToString(url + "/entities/publication/" + item.getID()))) - .andExpect(jsonPath("$.linkset[2].describes[0].type", - Matchers.hasToString("text/html"))) - .andExpect(jsonPath("$.linkset[2].anchor", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) - ; + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))); } @Test public void findOneItemJsonLinksetsWithType() throws Exception { String url = 
configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); String articleUri = mapConverterDSpaceToSchemaOrgUri.getValue("Article"); context.turnOffAuthorisationSystem(); Item item = ItemBuilder.createItem(context, collection) @@ -166,17 +161,13 @@ public void findOneItemJsonLinksetsWithType() throws Exception { getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", - Matchers.hasSize(3))) + Matchers.hasSize(2))) .andExpect(jsonPath("$.linkset[0].cite-as[0].href", Matchers.hasToString(url + "/handle/" + item.getHandle()))) .andExpect(jsonPath("$.linkset[0].describedby[0].href", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) .andExpect(jsonPath("$.linkset[0].describedby[0].type", - Matchers.hasToString("application/vnd.datacite.datacite+json"))) - .andExpect(jsonPath("$.linkset[0].describedby[1].href", - Matchers.hasToString(url + "/handle/" + item.getHandle()))) - .andExpect(jsonPath("$.linkset[0].describedby[1].type", - Matchers.hasToString("text/html"))) + Matchers.hasToString(mimeType))) .andExpect(jsonPath("$.linkset[0].type", Matchers.hasSize(2))) .andExpect(jsonPath("$.linkset[0].type[0].href", @@ -197,13 +188,7 @@ public void findOneItemJsonLinksetsWithType() throws Exception { .andExpect(jsonPath("$.linkset[1].describes[0].type", Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[1].anchor", - Matchers.hasToString("http://localhost:4000/handle/" + item.getHandle()))) - .andExpect(jsonPath("$.linkset[2].describes[0].href", - Matchers.hasToString(url + "/entities/publication/" + item.getID()))) - .andExpect(jsonPath("$.linkset[2].describes[0].type", - Matchers.hasToString("text/html"))) - .andExpect(jsonPath("$.linkset[2].anchor", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))); } @Test @@ -240,7 +225,7 @@ public void findOneItemJsonLinksetsWithLicence() throws Exception { .andExpect(jsonPath("$.linkset[1].describes[0].type", Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[1].anchor", - Matchers.hasToString("http://localhost:4000/handle/" + item.getHandle()))); + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))); } @Test @@ -275,20 +260,17 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", - Matchers.hasSize(5))) + Matchers.hasSize(4))) .andExpect(jsonPath("$.linkset[0].cite-as[0].href", Matchers.hasToString(url + "/handle/" + item.getHandle()))) .andExpect(jsonPath("$.linkset[0].describedby[0].href", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))) + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) .andExpect(jsonPath("$.linkset[0].describedby[0].type", - Matchers.hasToString("application/vnd.datacite.datacite+json"))) - 
.andExpect(jsonPath("$.linkset[0].describedby[1].href", - Matchers.hasToString(url + "/handle/" + item.getHandle()))) - .andExpect(jsonPath("$.linkset[0].describedby[1].type", - Matchers.hasToString("text/html"))) + Matchers.hasToString(mimeType))) .andExpect(jsonPath("$.linkset[0].item[0].href", Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) .andExpect(jsonPath("$.linkset[0].item[0].type", @@ -343,13 +325,7 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { .andExpect(jsonPath("$.linkset[3].describes[0].type", Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[3].anchor", - Matchers.hasToString(url + "/handle/" + item.getHandle()))) - .andExpect(jsonPath("$.linkset[4].describes[0].href", - Matchers.hasToString(url + "/entities/publication/" + item.getID()))) - .andExpect(jsonPath("$.linkset[4].describes[0].type", - Matchers.hasToString("text/html"))) - .andExpect(jsonPath("$.linkset[4].anchor", - Matchers.hasToString(MessageFormat.format(doiPattern, doi)))); + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))); } @Test @@ -483,6 +459,7 @@ public void findOneItemLsetLinksets() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); String siteAsRelation = "<" + url + "/handle/" + item.getHandle() + "> ; rel=\"cite-as\" ; anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; String itemRelation = "<" + url + "/bitstreams/" + bitstream1.getID() + @@ -496,8 +473,9 @@ public void findOneItemLsetLinksets() throws Exception { String jsonLinksetRelation = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + "/json> ; rel=\"linkset\" ; type=\"application/linkset+json\" ;" + " anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; - String describedByRelation = "<" + url + "/handle/" + item.getHandle() + "> ; rel=\"describedby\" ;" + - " type=\"text/html\" ; anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; + String describedByRelation = "<" + url + "/" + signpostingUrl + "/describedby/" + item.getID() + + "> ; rel=\"describedby\" ;" + " type=\"" + mimeType + "\" ; anchor=\"" + url + + "/entities/publication/" + item.getID() + "\" ,"; String bitstreamCollectionLink = "<" + url + "/entities/publication/" + item.getID() + "> ;" + " rel=\"collection\" ; type=\"text/html\" ; anchor=\"" + url + "/bitstreams/" @@ -511,7 +489,7 @@ public void findOneItemLsetLinksets() throws Exception { String describesMetadataLink = "<" + url + "/entities/publication/" + item.getID() + "> ; " + "rel=\"describes\" ; type=\"text/html\" ; " + - "anchor=\"" + url + "/handle/" + item.getHandle() + "\""; + "anchor=\"" + url + "/" + signpostingUrl + "/describedby/" + item.getID() + "\""; getClient().perform(get("/signposting/linksets/" + item.getID())) .andExpect(content().string(Matchers.containsString(siteAsRelation))) @@ -586,44 +564,36 @@ public void findTypedLinkForItemWithAuthor() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); String dcIdentifierUriMetadataValue = itemService .getMetadataFirstValue(publication, "dc", "identifier", "uri", Item.ANY); - 
String anchor = url + "/entities/publication/" + publication.getID(); getClient().perform(get("/signposting/links/" + publication.getID()) .header("Accept", "application/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$", - Matchers.hasSize(8))) + Matchers.hasSize(7))) .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(orcidPattern, orcidValue) + "' " + - "&& @.rel == 'author' " + - "&& @.anchor == '" + anchor + "')]").exists()) - .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(doiPattern, doi) + "' " + - "&& @.rel == 'describedby' " + - "&& @.anchor == '" + anchor + "')]").exists()) - .andExpect(jsonPath("$[?(@.href == '" + dcIdentifierUriMetadataValue + "' " + + "&& @.rel == 'author')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/describedby/" + + publication.getID() + "' " + "&& @.rel == 'describedby' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.type == '" + mimeType + "')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + dcIdentifierUriMetadataValue + "' " + - "&& @.rel == 'cite-as' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.rel == 'cite-as')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + url + "/bitstreams/" + bitstream.getID() + "/download' " + "&& @.rel == 'item' " + - "&& @.type == 'text/plain' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.type == 'text/plain')]").exists()) .andExpect(jsonPath("$[?(@.href == 'https://schema.org/AboutPage' " + - "&& @.rel == 'type' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.rel == 'type')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + publication.getID().toString() + "' " + "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.type == 'application/linkset')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + publication.getID().toString() + "/json' " + "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset+json' " + - "&& @.anchor == '" + anchor + "')]").exists()); + "&& @.type == 'application/linkset+json')]").exists()); } @Test @@ -648,7 +618,6 @@ public void findTypedLinkForBitstream() throws Exception { context.restoreAuthSystemState(); String uiUrl = configurationService.getProperty("dspace.ui.url"); - String anchor = uiUrl + "/bitstreams/" + bitstream.getID() + "/download"; getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) .andExpect(status().isOk()) @@ -656,16 +625,13 @@ public void findTypedLinkForBitstream() throws Exception { Matchers.hasSize(3))) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + "&& @.rel == 'collection' " + - "&& @.type == 'text/html' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.type == 'text/html')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.type == 'application/linkset')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + "' && @.rel == 'linkset' " + - "&& @.type == 'application/linkset+json' " + - "&& @.anchor == '" + anchor + "')]").exists()); + "&& @.type == 'application/linkset+json')]").exists()); 
DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); @@ -696,7 +662,6 @@ public void findTypedLinkForBitstreamWithType() throws Exception { context.restoreAuthSystemState(); String uiUrl = configurationService.getProperty("dspace.ui.url"); - String anchor = uiUrl + "/bitstreams/" + bitstream.getID() + "/download"; getClient().perform(get("/signposting/links/" + bitstream.getID()) .header("Accept", "application/json")) .andExpect(status().isOk()) @@ -704,19 +669,15 @@ public void findTypedLinkForBitstreamWithType() throws Exception { Matchers.hasSize(4))) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + "&& @.rel == 'collection' " + - "&& @.type == 'text/html' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.type == 'text/html')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + "&& @.rel == 'linkset' " + - "&& @.type == 'application/linkset' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.type == 'application/linkset')]").exists()) .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + "' && @.rel == 'linkset' " + - "&& @.type == 'application/linkset+json' " + - "&& @.anchor == '" + anchor + "')]").exists()) + "&& @.type == 'application/linkset+json')]").exists()) .andExpect(jsonPath("$[?(@.href == 'https://schema.org/ScholarlyArticle' " + - "&& @.rel == 'type' " + - "&& @.anchor == '" + anchor + "')]").exists()); + "&& @.rel == 'type')]").exists()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); metadataAuthorityService.clearCache(); @@ -837,4 +798,111 @@ public void findTypedLinkForUnDiscoverableItem() throws Exception { choiceAuthorityService.clearCache(); } + @Test + public void getDescribedBy() throws Exception { + context.turnOffAuthorisationSystem(); + DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + String currentDateInFormat = dateFormat.format(new Date()); + String title = "Item Test"; + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "identifier", "doi", doi) + .build(); + String responseMimeType = configurationService.getProperty("signposting.describedby.mime-type"); + context.restoreAuthSystemState(); + + String titleXml = "" + title + ""; + String doiXml = "" + doi + ""; + String handleXml = "http://localhost:4000/handle/" + item.getHandle() + + ""; + String acceptedDateXml = "" + + currentDateInFormat + ""; + String availableDateXml = "" + + currentDateInFormat + ""; + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isOk()) + .andExpect(content().string(Matchers.containsString(titleXml))) + .andExpect(content().string(Matchers.containsString(doiXml))) + .andExpect(content().string(Matchers.containsString(handleXml))) + .andExpect(content().string(Matchers.containsString(acceptedDateXml))) + .andExpect(content().string(Matchers.containsString(availableDateXml))) + .andExpect(MockMvcResultMatchers.header() + .stringValues("Content-Type", responseMimeType + ";charset=UTF-8")); + } + + @Test + public void getDescribedByItemThatIsInWorkspace() throws Exception { + context.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .build(); + itemService.addMetadata(context, workspaceItem.getItem(), "dc", 
"identifier", "doi", Item.ANY, doi); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + workspaceItem.getItem().getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByWithdrawnItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withdrawn() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByEmbargoItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withIssueDate("2017-11-18") + .withEmbargoPeriod("2 week") + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByRestrictedItem() throws Exception { + context.turnOffAuthorisationSystem(); + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withReaderGroup(internalGroup) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByUnDiscoverableItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } } diff --git a/dspace/config/modules/signposting.cfg b/dspace/config/modules/signposting.cfg index 2265294981a2..fba80da41481 100644 --- a/dspace/config/modules/signposting.cfg +++ b/dspace/config/modules/signposting.cfg @@ -26,4 +26,10 @@ signposting.path = signposting # When "true", the signposting controller is accessible on ${signposting.path} # When "false" or commented out, signposting is disabled/inaccessible. # (Requires reboot of servlet container, e.g. Tomcat, to reload) -signposting.enabled = true \ No newline at end of file +signposting.enabled = true + +# Name of crosswalk to use for handling of 'describedby' links. +signposting.describedby.crosswalk-name = DataCite + +# Mime-type of response of handling of 'describedby' links. 
+signposting.describedby.mime-type = application/vnd.datacite.datacite+xml \ No newline at end of file diff --git a/dspace/config/spring/rest/signposting.xml b/dspace/config/spring/rest/signposting.xml index 51060590d280..72109dbe26e5 100644 --- a/dspace/config/spring/rest/signposting.xml +++ b/dspace/config/spring/rest/signposting.xml @@ -1,7 +1,6 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -23,20 +22,7 @@ - - - - - - - - - - - - - + class="org.dspace.app.rest.signposting.processor.item.ItemDescribedbyProcessor"/> @@ -59,14 +45,6 @@ - - - - - - - - + From 12fe87fc372f76e11f5b9f7c1c6c4ac7d96e2a6e Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Fri, 23 Jun 2023 09:46:46 +0200 Subject: [PATCH 310/686] [DURACOM-156] Singposting: feedback after initial deployment --- .../controller/LinksetRestControllerIT.java | 25 +++++-------------- 1 file changed, 6 insertions(+), 19 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index d348a7d3cd51..812a1caa33c7 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -109,7 +109,7 @@ public void findAllItemsLinksets() throws Exception { public void findOneItemJsonLinksets() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); - String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); + String mimeType = "application/vnd.datacite.datacite+xml"; context.turnOffAuthorisationSystem(); Item item = ItemBuilder.createItem(context, collection) .withTitle("Item Test") @@ -148,7 +148,7 @@ public void findOneItemJsonLinksets() throws Exception { public void findOneItemJsonLinksetsWithType() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); - String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); + String mimeType = "application/vnd.datacite.datacite+xml"; String articleUri = mapConverterDSpaceToSchemaOrgUri.getValue("Article"); context.turnOffAuthorisationSystem(); Item item = ItemBuilder.createItem(context, collection) @@ -260,7 +260,7 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); - String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); + String mimeType = "application/vnd.datacite.datacite+xml"; getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) .andExpect(status().isOk()) .andExpect(jsonPath("$.linkset", @@ -459,7 +459,7 @@ public void findOneItemLsetLinksets() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); - String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); + String mimeType = "application/vnd.datacite.datacite+xml"; String siteAsRelation = "<" + 
url + "/handle/" + item.getHandle() + "> ; rel=\"cite-as\" ; anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; String itemRelation = "<" + url + "/bitstreams/" + bitstream1.getID() + @@ -564,7 +564,7 @@ public void findTypedLinkForItemWithAuthor() throws Exception { String url = configurationService.getProperty("dspace.ui.url"); String signpostingUrl = configurationService.getProperty("signposting.path"); - String mimeType = configurationService.getProperty("signposting.describedby.mime-type"); + String mimeType = "application/vnd.datacite.datacite+xml"; String dcIdentifierUriMetadataValue = itemService .getMetadataFirstValue(publication, "dc", "identifier", "uri", Item.ANY); @@ -808,27 +808,14 @@ public void getDescribedBy() throws Exception { .withTitle(title) .withMetadata("dc", "identifier", "doi", doi) .build(); - String responseMimeType = configurationService.getProperty("signposting.describedby.mime-type"); + String responseMimeType = "application/vnd.datacite.datacite+xml"; context.restoreAuthSystemState(); String titleXml = "" + title + ""; - String doiXml = "" + doi + ""; - String handleXml = "http://localhost:4000/handle/" + item.getHandle() + - ""; - String acceptedDateXml = "" + - currentDateInFormat + ""; - String availableDateXml = "" + - currentDateInFormat + ""; getClient().perform(get("/signposting/describedby/" + item.getID())) .andExpect(status().isOk()) .andExpect(content().string(Matchers.containsString(titleXml))) - .andExpect(content().string(Matchers.containsString(doiXml))) - .andExpect(content().string(Matchers.containsString(handleXml))) - .andExpect(content().string(Matchers.containsString(acceptedDateXml))) - .andExpect(content().string(Matchers.containsString(availableDateXml))) .andExpect(MockMvcResultMatchers.header() .stringValues("Content-Type", responseMimeType + ";charset=UTF-8")); } From 890f2a15e52eaaefe75273bff266448518edbc40 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Fri, 23 Jun 2023 11:45:07 +0200 Subject: [PATCH 311/686] [DURACOM-156] Singposting: feedback after initial deployment --- .../controller/LinksetRestController.java | 13 ++++-- .../controller/LinksetRestControllerIT.java | 45 +++++++++---------- 2 files changed, 30 insertions(+), 28 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java index 98642bb703ed..2a940d79aba4 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java @@ -88,8 +88,11 @@ public ResponseEntity getAll() { } @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") - @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/json", - method = RequestMethod.GET) + @RequestMapping( + value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/json", + method = RequestMethod.GET, + produces = "application/linkset+json" + ) public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) { try { Context context = ContextUtil.obtainContext(request); @@ -109,7 +112,11 @@ public LinksetRest getJson(HttpServletRequest request, @PathVariable UUID uuid) } @PreAuthorize("hasPermission(#uuid, 'ITEM', 'READ')") - @RequestMapping(value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, method = 
RequestMethod.GET) + @RequestMapping( + value = "/linksets" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, + method = RequestMethod.GET, + produces = "application/linkset" + ) public String getLset(HttpServletRequest request, @PathVariable UUID uuid) { try { Context context = ContextUtil.obtainContext(request); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index 812a1caa33c7..851752793410 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -10,6 +10,7 @@ import static org.dspace.content.MetadataSchemaEnum.PERSON; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -52,7 +53,6 @@ import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.web.servlet.result.MockMvcResultMatchers; public class LinksetRestControllerIT extends AbstractControllerIntegrationTest { @@ -141,7 +141,8 @@ public void findOneItemJsonLinksets() throws Exception { .andExpect(jsonPath("$.linkset[1].describes[0].type", Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[1].anchor", - Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))); + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); } @Test @@ -188,7 +189,8 @@ public void findOneItemJsonLinksetsWithType() throws Exception { .andExpect(jsonPath("$.linkset[1].describes[0].type", Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[1].anchor", - Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))); + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); } @Test @@ -225,7 +227,8 @@ public void findOneItemJsonLinksetsWithLicence() throws Exception { .andExpect(jsonPath("$.linkset[1].describes[0].type", Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[1].anchor", - Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))); + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); } @Test @@ -325,7 +328,8 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { .andExpect(jsonPath("$.linkset[3].describes[0].type", Matchers.hasToString("text/html"))) .andExpect(jsonPath("$.linkset[3].anchor", - Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))); + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", 
"application/linkset+json;charset=UTF-8")); } @Test @@ -501,7 +505,8 @@ public void findOneItemLsetLinksets() throws Exception { .andExpect(content().string(Matchers.containsString(bitstreamCollectionLink))) .andExpect(content().string(Matchers.containsString(bitstreamLinksetLink))) .andExpect(content().string(Matchers.containsString(bitstreamLinksetJsonLink))) - .andExpect(content().string(Matchers.containsString(describesMetadataLink))); + .andExpect(content().string(Matchers.containsString(describesMetadataLink))) + .andExpect(header().stringValues("Content-Type", "application/linkset;charset=UTF-8")); } @Test @@ -568,8 +573,7 @@ public void findTypedLinkForItemWithAuthor() throws Exception { String dcIdentifierUriMetadataValue = itemService .getMetadataFirstValue(publication, "dc", "identifier", "uri", Item.ANY); - getClient().perform(get("/signposting/links/" + publication.getID()) - .header("Accept", "application/json")) + getClient().perform(get("/signposting/links/" + publication.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.hasSize(7))) @@ -618,8 +622,7 @@ public void findTypedLinkForBitstream() throws Exception { context.restoreAuthSystemState(); String uiUrl = configurationService.getProperty("dspace.ui.url"); - getClient().perform(get("/signposting/links/" + bitstream.getID()) - .header("Accept", "application/json")) + getClient().perform(get("/signposting/links/" + bitstream.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.hasSize(3))) @@ -662,8 +665,7 @@ public void findTypedLinkForBitstreamWithType() throws Exception { context.restoreAuthSystemState(); String uiUrl = configurationService.getProperty("dspace.ui.url"); - getClient().perform(get("/signposting/links/" + bitstream.getID()) - .header("Accept", "application/json")) + getClient().perform(get("/signposting/links/" + bitstream.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.hasSize(4))) @@ -709,8 +711,7 @@ public void findTypedLinkForRestrictedBitstream() throws Exception { } context.restoreAuthSystemState(); - getClient().perform(get("/signposting/links/" + bitstream.getID()) - .header("Accept", "application/json")) + getClient().perform(get("/signposting/links/" + bitstream.getID())) .andExpect(status().isUnauthorized()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); @@ -741,8 +742,7 @@ public void findTypedLinkForBitstreamUnderEmbargo() throws Exception { } context.restoreAuthSystemState(); - getClient().perform(get("/signposting/links/" + bitstream.getID()) - .header("Accept", "application/json")) + getClient().perform(get("/signposting/links/" + bitstream.getID())) .andExpect(status().isUnauthorized()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); @@ -772,8 +772,7 @@ public void findTypedLinkForBitstreamOfWorkspaceItem() throws Exception { } context.restoreAuthSystemState(); - getClient().perform(get("/signposting/links/" + bitstream.getID()) - .header("Accept", "application/json")) + getClient().perform(get("/signposting/links/" + bitstream.getID())) .andExpect(status().isUnauthorized()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); @@ -789,8 +788,7 @@ public void findTypedLinkForUnDiscoverableItem() throws Exception { .build(); context.restoreAuthSystemState(); - getClient().perform(get("/signposting/links/" + item.getID()) - .header("Accept", "application/json")) + getClient().perform(get("/signposting/links/" + item.getID())) 
.andExpect(status().isUnauthorized()); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); @@ -811,13 +809,10 @@ public void getDescribedBy() throws Exception { String responseMimeType = "application/vnd.datacite.datacite+xml"; context.restoreAuthSystemState(); - String titleXml = "" + title + ""; - getClient().perform(get("/signposting/describedby/" + item.getID())) .andExpect(status().isOk()) - .andExpect(content().string(Matchers.containsString(titleXml))) - .andExpect(MockMvcResultMatchers.header() - .stringValues("Content-Type", responseMimeType + ";charset=UTF-8")); + .andExpect(content().string(Matchers.containsString(title))) + .andExpect(header().stringValues("Content-Type", responseMimeType + ";charset=UTF-8")); } @Test From df4cff7dd1d79ef1e428b784329034576542ee1f Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Fri, 23 Jun 2023 17:24:42 +0200 Subject: [PATCH 312/686] [DURACOM-156] Singposting: feedback after initial deployment --- .../service/impl/LinksetServiceImpl.java | 8 +- .../controller/LinksetRestControllerIT.java | 100 ++++++++++++++++++ 2 files changed, 105 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java index e7fa25de93bb..399b7bd1e6b0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java @@ -21,9 +21,10 @@ import org.dspace.app.rest.signposting.processor.metadata.MetadataSignpostingProcessor; import org.dspace.app.rest.signposting.service.LinksetService; import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; -import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.utils.DSpace; @@ -39,7 +40,7 @@ public class LinksetServiceImpl implements LinksetService { private static final Logger log = Logger.getLogger(LinksetServiceImpl.class); @Autowired - private BitstreamService bitstreamService; + protected ItemService itemService; @Autowired private BitstreamMetadataReadPermissionEvaluatorPlugin bitstreamMetadataReadPermissionEvaluatorPlugin; @@ -143,7 +144,8 @@ private void addMetadataLinksets( private Iterator getItemBitstreams(Context context, Item item) { try { - return bitstreamService.getItemBitstreams(context, item); + List bundles = itemService.getBundles(item, Constants.DEFAULT_BUNDLE_NAME); + return bundles.stream().flatMap(bundle -> bundle.getBitstreams().stream()).iterator(); } catch (SQLException e) { throw new RuntimeException(e); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java index 851752793410..6d1d242cad7f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -45,6 +45,7 @@ import org.dspace.content.service.BitstreamService; import org.dspace.content.service.ItemService; import 
org.dspace.content.service.RelationshipTypeService; +import org.dspace.core.Constants; import org.dspace.eperson.Group; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -332,6 +333,105 @@ public void findOneItemJsonLinksetsWithBitstreams() throws Exception { .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); } + @Test + public void findOneItemJsonLinksetsWithBitstreamsFromDifferentBundles() throws Exception { + String bitstream1Content = "ThisIsSomeDummyText"; + String bitstream1MimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, item, is, Constants.DEFAULT_BUNDLE_NAME) + .withName("Bitstream 1") + .withDescription("description") + .withMimeType(bitstream1MimeType) + .build(); + } + + try (InputStream is = IOUtils.toInputStream("test", CharEncoding.UTF_8)) { + Bitstream bitstream2 = BitstreamBuilder.createBitstream(context, item, is, "TEXT") + .withName("Bitstream 2") + .withDescription("description") + .withMimeType("application/pdf") + .build(); + } + + try (InputStream is = IOUtils.toInputStream("test", CharEncoding.UTF_8)) { + Bitstream bitstream3 = BitstreamBuilder.createBitstream(context, item, is, "THUMBNAIL") + .withName("Bitstream 3") + .withDescription("description") + .withMimeType("application/pdf") + .build(); + } + + try (InputStream is = IOUtils.toInputStream("test", CharEncoding.UTF_8)) { + Bitstream bitstream4 = BitstreamBuilder.createBitstream(context, item, is, "LICENSE") + .withName("Bitstream 4") + .withDescription("description") + .withMimeType("application/pdf") + .build(); + } + + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(3))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString(mimeType))) + .andExpect(jsonPath("$.linkset[0].item", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].item[0].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[0].item[0].type", + Matchers.hasToString(bitstream1MimeType))) + .andExpect(jsonPath("$.linkset[0].anchor", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) 
+ .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].collection[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].collection[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[1].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[2].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[2].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[2].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + @Test public void findOneItemThatIsInWorkspaceJsonLinksets() throws Exception { context.turnOffAuthorisationSystem(); From 01c761281c00b72ba4137900ac7704b1dc08dfd0 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 23 Jun 2023 11:50:12 -0500 Subject: [PATCH 313/686] Update LICENSES_THIRD_PARTY for 7.6 --- LICENSES_THIRD_PARTY | 381 +++++++++++++++++++++++-------------------- 1 file changed, 200 insertions(+), 181 deletions(-) diff --git a/LICENSES_THIRD_PARTY b/LICENSES_THIRD_PARTY index b96ea77648a6..e494c80c5d6e 100644 --- a/LICENSES_THIRD_PARTY +++ b/LICENSES_THIRD_PARTY @@ -26,7 +26,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava) * JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava) * HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc) - * com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.16.0 - https://drewnoakes.com/code/exif/) + * com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.18.0 - https://drewnoakes.com/code/exif/) * parso (com.epam:parso:2.0.14 - https://github.com/epam/parso) * Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java) * ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate) @@ -34,12 +34,12 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Jackson-core (com.fasterxml.jackson.core:jackson-core:2.13.4 - https://github.com/FasterXML/jackson-core) * jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.2 - http://github.com/FasterXML/jackson) * Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary) - * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary) + * Jackson dataformat: Smile 
(com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.13.3 - http://github.com/FasterXML/jackson-dataformats-binary) * Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text) - * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) + * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) - * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) - * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) * Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator) * Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox) * zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/) @@ -56,19 +56,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics) * FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/) * Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson) - * error-prone annotations (com.google.errorprone:error_prone_annotations:2.7.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) + * error-prone annotations (com.google.errorprone:error_prone_annotations:2.18.0 - https://errorprone.info/error_prone_annotations) * Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess) - * Guava: Google Core Libraries for Java (com.google.guava:guava:31.0.1-jre - https://github.com/google/guava) + * Guava: Google Core Libraries for Java (com.google.guava:guava:32.0.0-jre - https://github.com/google/guava) * Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5) * Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture) * Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client) * GSON extensions to the Google HTTP Client Library for Java. 
(com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson) * Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2) - * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/) + * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:2.8 - https://github.com/google/j2objc/) * Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client) * ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap) * libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/) - * Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.1 - https://jackcess.sourceforge.io) + * Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.2 - https://jackcess.sourceforge.io) * Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net) * project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath) * project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath) @@ -79,11 +79,20 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt) * opencsv (com.opencsv:opencsv:5.6 - http://opencsv.sf.net) * java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst) - * rome (com.rometools:rome:1.18.0 - http://rometools.com/rome) - * rome-modules (com.rometools:rome-modules:1.18.0 - http://rometools.com/rome-modules) - * rome-utils (com.rometools:rome-utils:1.18.0 - http://rometools.com/rome-utils) + * rome (com.rometools:rome:1.19.0 - http://rometools.com/rome) + * rome-modules (com.rometools:rome-modules:1.19.0 - http://rometools.com/rome-modules) + * rome-utils (com.rometools:rome-utils:1.19.0 - http://rometools.com/rome-utils) * fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net) * T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest) + * config (com.typesafe:config:1.3.3 - https://github.com/lightbend/config) + * ssl-config-core (com.typesafe:ssl-config-core_2.13:0.3.8 - https://github.com/lightbend/ssl-config) + * akka-actor (com.typesafe.akka:akka-actor_2.13:2.5.31 - https://akka.io/) + * akka-http-core (com.typesafe.akka:akka-http-core_2.13:10.1.12 - https://akka.io) + * akka-http (com.typesafe.akka:akka-http_2.13:10.1.12 - https://akka.io) + * akka-parsing (com.typesafe.akka:akka-parsing_2.13:10.1.12 - https://akka.io) + * akka-protobuf (com.typesafe.akka:akka-protobuf_2.13:2.5.31 - https://akka.io/) + * akka-stream (com.typesafe.akka:akka-stream_2.13:2.5.31 - https://akka.io/) + * scala-logging (com.typesafe.scala-logging:scala-logging_2.13:3.9.2 - https://github.com/lightbend/scala-logging) * JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk) * SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet) * Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - 
https://commons.apache.org/proper/commons-beanutils/) @@ -91,20 +100,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache Commons Codec (commons-codec:commons-codec:1.10 - http://commons.apache.org/proper/commons-codec/) * Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/) * Commons Digester (commons-digester:commons-digester:1.8.1 - http://commons.apache.org/digester/) - * Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.3 - http://commons.apache.org/proper/commons-fileupload/) + * Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.5 - https://commons.apache.org/proper/commons-fileupload/) * Apache Commons IO (commons-io:commons-io:2.7 - https://commons.apache.org/proper/commons-io/) * Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/) * Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/) * Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/) * GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson) - * Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/) * OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu) * Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core) * Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite) * Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9) * Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx) * JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm) - * micrometer-core (io.micrometer:micrometer-core:1.8.6 - https://github.com/micrometer-metrics/micrometer) + * micrometer-core (io.micrometer:micrometer-core:1.9.11 - https://github.com/micrometer-metrics/micrometer) * Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/) * Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/) * Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/) @@ -188,88 +196,87 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util) * Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1) * Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix) - * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/) - * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/) - * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/) - * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/) - * Apache Log4j Layout for JSON 
template (org.apache.logging.log4j:log4j-layout-template-json:2.16.0 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/) - * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) - * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/) - * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) - * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) - * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) - * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) - * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) - * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) - * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) - * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) - * Lucene Classification (org.apache.lucene:lucene-classification:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-classification) - * Lucene codecs (org.apache.lucene:lucene-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-codecs) - * Lucene Core (org.apache.lucene:lucene-core:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-core) - * Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-expressions) - * Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-grouping) - * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter) - * Lucene Join (org.apache.lucene:lucene-join:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-join) - * Lucene Memory (org.apache.lucene:lucene-memory:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-memory) - * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-misc) - * Lucene Queries (org.apache.lucene:lucene-queries:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queries) - * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser) - * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox) - * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) - * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) - * Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest) - * Apache FontBox (org.apache.pdfbox:fontbox:2.0.27 - http://pdfbox.apache.org/) - * PDFBox JBIG2 ImageIO plugin 
(org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/) - * Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/) - * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox/) - * Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/) - * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) - * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/) - * Apache POI - Common (org.apache.poi:poi:5.2.0 - https://poi.apache.org/) - * Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.0 - https://poi.apache.org/) - * Apache POI (org.apache.poi:poi-ooxml-lite:5.2.0 - https://poi.apache.org/) - * Apache POI (org.apache.poi:poi-scratchpad:5.2.0 - https://poi.apache.org/) - * Apache Solr Core (org.apache.solr:solr-core:8.11.1 - https://lucene.apache.org/solr-parent/solr-core) - * Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.1 - https://lucene.apache.org/solr-parent/solr-solrj) + * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/) + * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-api/) + * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-core/) + * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-jul/) + * Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/) + * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) + * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-web/) + * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) + * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) + * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) + * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) + * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) + * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) + * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) + * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) + * Lucene Classification (org.apache.lucene:lucene-classification:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-classification) + * Lucene codecs (org.apache.lucene:lucene-codecs:8.11.2 - 
https://lucene.apache.org/lucene-parent/lucene-codecs) + * Lucene Core (org.apache.lucene:lucene-core:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-core) + * Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-expressions) + * Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-grouping) + * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-highlighter) + * Lucene Join (org.apache.lucene:lucene-join:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-join) + * Lucene Memory (org.apache.lucene:lucene-memory:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-memory) + * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-misc) + * Lucene Queries (org.apache.lucene:lucene-queries:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queries) + * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queryparser) + * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-sandbox) + * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) + * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) + * Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-suggest) + * Apache FontBox (org.apache.pdfbox:fontbox:2.0.28 - http://pdfbox.apache.org/) + * PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.4 - https://www.apache.org/jbig2-imageio/) + * Apache JempBox (org.apache.pdfbox:jempbox:1.8.17 - http://www.apache.org/pdfbox-parent/jempbox/) + * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.28 - https://www.apache.org/pdfbox-parent/pdfbox/) + * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) + * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.27 - https://www.apache.org/pdfbox-parent/xmpbox/) + * Apache POI - Common (org.apache.poi:poi:5.2.3 - https://poi.apache.org/) + * Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.3 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-ooxml-lite:5.2.3 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-scratchpad:5.2.3 - https://poi.apache.org/) + * Apache Solr Core (org.apache.solr:solr-core:8.11.2 - https://lucene.apache.org/solr-parent/solr-core) + * Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.2 - https://lucene.apache.org/solr-parent/solr-solrj) * Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl) * Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec) * Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org) - * Apache Tika core (org.apache.tika:tika-core:2.3.0 - https://tika.apache.org/) - * Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.3.0 - https://tika.apache.org/tika-parser-apple-module/) - * Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.3.0 - 
https://tika.apache.org/tika-parser-audiovideo-module/) - * Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.3.0 - https://tika.apache.org/tika-parser-cad-module/) - * Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.3.0 - https://tika.apache.org/tika-parser-code-module/) - * Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.3.0 - https://tika.apache.org/tika-parser-crypto-module/) - * Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.3.0 - https://tika.apache.org/tika-parser-digest-commons/) - * Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.3.0 - https://tika.apache.org/tika-parser-font-module/) - * Apache Tika html commons (org.apache.tika:tika-parser-html-commons:2.3.0 - https://tika.apache.org/tika-parser-html-commons/) - * Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.3.0 - https://tika.apache.org/tika-parser-html-module/) - * Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.3.0 - https://tika.apache.org/tika-parser-image-module/) - * Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.3.0 - https://tika.apache.org/tika-parser-mail-commons/) - * Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.3.0 - https://tika.apache.org/tika-parser-mail-module/) - * Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.3.0 - https://tika.apache.org/tika-parser-microsoft-module/) - * Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.3.0 - https://tika.apache.org/tika-parser-miscoffice-module/) - * Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.3.0 - https://tika.apache.org/tika-parser-news-module/) - * Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.3.0 - https://tika.apache.org/tika-parser-ocr-module/) - * Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.3.0 - https://tika.apache.org/tika-parser-pdf-module/) - * Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.3.0 - https://tika.apache.org/tika-parser-pkg-module/) - * Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.3.0 - https://tika.apache.org/tika-parser-text-module/) - * Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.3.0 - https://tika.apache.org/tika-parser-xml-module/) - * Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.3.0 - https://tika.apache.org/tika-parser-xmp-commons/) - * Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.3.0 - https://tika.apache.org/tika-parser-zip-commons/) - * Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.3.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/) - * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.63 - https://tomcat.apache.org/) - * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.63 - https://tomcat.apache.org/) - * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.63 - https://tomcat.apache.org/) + * Apache Tika core (org.apache.tika:tika-core:2.5.0 - https://tika.apache.org/) + * Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.5.0 - https://tika.apache.org/tika-parser-apple-module/) + * Apache Tika audiovideo parser module 
(org.apache.tika:tika-parser-audiovideo-module:2.5.0 - https://tika.apache.org/tika-parser-audiovideo-module/) + * Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.5.0 - https://tika.apache.org/tika-parser-cad-module/) + * Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.5.0 - https://tika.apache.org/tika-parser-code-module/) + * Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.5.0 - https://tika.apache.org/tika-parser-crypto-module/) + * Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.5.0 - https://tika.apache.org/tika-parser-digest-commons/) + * Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.5.0 - https://tika.apache.org/tika-parser-font-module/) + * Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.5.0 - https://tika.apache.org/tika-parser-html-module/) + * Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.5.0 - https://tika.apache.org/tika-parser-image-module/) + * Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.5.0 - https://tika.apache.org/tika-parser-mail-commons/) + * Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.5.0 - https://tika.apache.org/tika-parser-mail-module/) + * Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.5.0 - https://tika.apache.org/tika-parser-microsoft-module/) + * Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.5.0 - https://tika.apache.org/tika-parser-miscoffice-module/) + * Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.5.0 - https://tika.apache.org/tika-parser-news-module/) + * Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.5.0 - https://tika.apache.org/tika-parser-ocr-module/) + * Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.5.0 - https://tika.apache.org/tika-parser-pdf-module/) + * Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.5.0 - https://tika.apache.org/tika-parser-pkg-module/) + * Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.5.0 - https://tika.apache.org/tika-parser-text-module/) + * Apache Tika WARC parser module (org.apache.tika:tika-parser-webarchive-module:2.5.0 - https://tika.apache.org/tika-parser-webarchive-module/) + * Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.5.0 - https://tika.apache.org/tika-parser-xml-module/) + * Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.5.0 - https://tika.apache.org/tika-parser-xmp-commons/) + * Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.5.0 - https://tika.apache.org/tika-parser-zip-commons/) + * Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.5.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/) + * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.75 - https://tomcat.apache.org/) + * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.75 - https://tomcat.apache.org/) + * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.75 - https://tomcat.apache.org/) * Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/) * Apache Velocity - JSR 223 Scripting 
(org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/) * Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/) * Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/) - * XmlBeans (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/) + * XmlBeans (org.apache.xmlbeans:xmlbeans:5.1.1 - https://xmlbeans.apache.org/) * Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper) * Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute) * org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian) - * AssertJ fluent assertions (org.assertj:assertj-core:3.21.0 - https://assertj.github.io/doc/assertj-core/) + * AssertJ fluent assertions (org.assertj:assertj-core:3.22.0 - https://assertj.github.io/doc/assertj-core/) * Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector) * jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/) * TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/) @@ -279,34 +286,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io) + * Jetty :: Continuation 
(org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io) * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml) * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common) * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack) * Jetty :: HTTP2 :: HTTP Client Transport 
(org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org) * flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core) @@ -315,8 +322,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.23.Final - http://hibernate.org/validator/hibernate-validator) - * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.23.Final - http://hibernate.org/validator/hibernate-validator-cdi) + * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.2.5.Final - http://hibernate.org/validator/hibernate-validator) + * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.2.5.Final - http://hibernate.org/validator/hibernate-validator-cdi) + * leveldb (org.iq80.leveldb:leveldb:0.12 - http://github.com/dain/leveldb/leveldb) + * leveldb-api (org.iq80.leveldb:leveldb-api:0.12 - http://github.com/dain/leveldb/leveldb-api) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) * Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex) * JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org) @@ -337,59 +346,67 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester) * Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util) * Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api) + * jwarc (org.netpreserve:jwarc:0.19.0 - https://github.com/iipc/jwarc) * Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis) * parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org) * parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org) * RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/) - * JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert) - * Spring AOP (org.springframework:spring-aop:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Beans (org.springframework:spring-beans:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Context (org.springframework:spring-context:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Context Support (org.springframework:spring-context-support:5.3.20 - 
https://github.com/spring-projects/spring-framework) - * Spring Core (org.springframework:spring-core:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring JDBC (org.springframework:spring-jdbc:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring TestContext Framework (org.springframework:spring-test:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Transaction (org.springframework:spring-tx:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Web (org.springframework:spring-web:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Web MVC (org.springframework:spring-webmvc:5.3.20 - https://github.com/spring-projects/spring-framework) - * spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) + * Scala Library (org.scala-lang:scala-library:2.13.9 - https://www.scala-lang.org/) + * Scala Compiler (org.scala-lang:scala-reflect:2.13.0 - https://www.scala-lang.org/) + * scala-collection-compat (org.scala-lang.modules:scala-collection-compat_2.13:2.1.6 - http://www.scala-lang.org/) + * scala-java8-compat (org.scala-lang.modules:scala-java8-compat_2.13:0.9.0 - http://www.scala-lang.org/) + * scala-parser-combinators (org.scala-lang.modules:scala-parser-combinators_2.13:1.1.2 - http://www.scala-lang.org/) + * scala-xml (org.scala-lang.modules:scala-xml_2.13:1.3.0 - http://www.scala-lang.org/) + * JSONassert (org.skyscreamer:jsonassert:1.5.1 - https://github.com/skyscreamer/JSONassert) + * JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.36 - http://www.slf4j.org) + * Spring AOP (org.springframework:spring-aop:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Beans (org.springframework:spring-beans:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Context (org.springframework:spring-context:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Context Support (org.springframework:spring-context-support:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Core (org.springframework:spring-core:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring JDBC (org.springframework:spring-jdbc:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring TestContext 
Framework (org.springframework:spring-test:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Transaction (org.springframework:spring-tx:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Web (org.springframework:spring-web:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Web MVC (org.springframework:spring-webmvc:5.3.27 - https://github.com/spring-projects/spring-framework) + * spring-boot (org.springframework.boot:spring-boot:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) * Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor) - * spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-test (org.springframework.boot:spring-boot-test:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) - * Spring Data Core (org.springframework.data:spring-data-commons:2.6.4 - https://www.spring.io/spring-data/spring-data-commons) - * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) - * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) - * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.4.2 - https://github.com/spring-projects/spring-hateoas) + * spring-boot-starter (org.springframework.boot:spring-boot-starter:2.7.12 - 
https://spring.io/projects/spring-boot) + * spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-test (org.springframework.boot:spring-boot-test:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) + * Spring Data Core (org.springframework.data:spring-data-commons:2.7.12 - https://www.spring.io/spring-data/spring-data-commons) + * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) + * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) + * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.5.4 - https://github.com/spring-projects/spring-hateoas) * Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core) - * spring-security-config (org.springframework.security:spring-security-config:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-core (org.springframework.security:spring-security-core:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-crypto (org.springframework.security:spring-security-crypto:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-test (org.springframework.security:spring-security-test:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-web (org.springframework.security:spring-security-web:5.6.5 - https://spring.io/projects/spring-security) + * spring-security-config (org.springframework.security:spring-security-config:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-core (org.springframework.security:spring-security-core:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-crypto (org.springframework.security:spring-security-crypto:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-test (org.springframework.security:spring-security-test:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-web 
(org.springframework.security:spring-security-web:5.7.8 - https://spring.io/projects/spring-security) * SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/) * snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java) * xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/) * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/) - * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.4 - https://www.xmlunit.org/) + * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.9.1 - https://www.xmlunit.org/) * org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/) - * SnakeYAML (org.yaml:snakeyaml:1.29 - http://www.snakeyaml.org) + * SnakeYAML (org.yaml:snakeyaml:1.30 - https://bitbucket.org/snakeyaml/snakeyaml) * software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/) * Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/) * xalan (xalan:xalan:2.7.0 - no url defined) @@ -404,7 +421,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security) * Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core) * JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/) - * curvesapi (com.github.virtuald:curvesapi:1.06 - https://github.com/virtuald/curvesapi) + * curvesapi (com.github.virtuald:curvesapi:1.07 - https://github.com/virtuald/curvesapi) * Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/) * JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/) * dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org) @@ -426,11 +443,15 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/) * asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/) * asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/) - * PostgreSQL JDBC Driver (org.postgresql:postgresql:42.4.3 - https://jdbc.postgresql.org) + * PostgreSQL JDBC Driver (org.postgresql:postgresql:42.6.0 - https://jdbc.postgresql.org) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio) * XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/) + CC0: + + * reactive-streams (org.reactivestreams:reactive-streams:1.0.2 - http://www.reactive-streams.org/) + Common Development and Distribution License (CDDL): * istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/) @@ -446,7 +467,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net) * javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net) * jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api) - * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) + * JHighlight 
(org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) * HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils) @@ -489,34 +510,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io) * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) * Jetty :: Security 
(org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml) * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common) * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack) * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) @@ -542,10 +563,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * msg-simple 
(com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple) * uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template) * FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/) - * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) - * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.5.Final - https://hibernate.org/orm) - * Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.5.Final - https://hibernate.org/orm) - * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.5.Final - https://hibernate.org/orm) + * JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight) + * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.15.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.15.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.15.Final - https://hibernate.org/orm) * Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org) * im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) @@ -562,9 +583,11 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines MIT License: + * better-files (com.github.pathikrit:better-files_2.13:3.9.1 - https://github.com/pathikrit/better-files) * Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver) - * dd-plist (com.googlecode.plist:dd-plist:1.23 - http://www.github.com/3breadt/dd-plist) + * dd-plist (com.googlecode.plist:dd-plist:1.25 - http://www.github.com/3breadt/dd-plist) * DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis) + * s3mock (io.findify:s3mock_2.13:0.2.6 - https://github.com/findify/s3mock) * JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple) * Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html) * Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html) @@ -572,15 +595,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html) * org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec) * Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org) - * Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org) + * Checker Qual (org.checkerframework:checker-qual:3.31.0 - https://checkerframework.org) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito) * mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito) * ORCID - Model (org.orcid:orcid-model:3.0.2 - 
http://github.com/ORCID/orcid-model) - * JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.25 - http://www.slf4j.org) - * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org) - * SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org) + * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.36 - http://www.slf4j.org) + * SLF4J API Module (org.slf4j:slf4j-api:1.7.36 - http://www.slf4j.org) * SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org) * HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org) * toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org) @@ -589,7 +611,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org) * urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org) * bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org) - * core-js (org.webjars.npm:core-js:3.28.0 - https://www.webjars.org) + * core-js (org.webjars.npm:core-js:3.30.1 - https://www.webjars.org) * @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org) Mozilla Public License: @@ -606,17 +628,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/) + * JSON in Java (org.json:json:20230227 - https://github.com/douglascrockford/JSON-java) * LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html) - The JSON License: - - * JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java) - UnRar License: - * Java Unrar (com.github.junrar:junrar:7.4.1 - https://github.com/junrar/junrar) + * Java Unrar (com.github.junrar:junrar:7.5.3 - https://github.com/junrar/junrar) Unicode/ICU License: From 5a43e6bcf1adb1e053be4dd46b566c604923c8a9 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 23 Jun 2023 12:10:53 -0500 Subject: [PATCH 314/686] [maven-release-plugin] prepare release dspace-7.6 --- dspace-api/pom.xml | 2 +- dspace-iiif/pom.xml | 2 +- dspace-oai/pom.xml | 2 +- dspace-rdf/pom.xml | 2 +- dspace-rest/pom.xml | 4 ++-- dspace-server-webapp/pom.xml | 2 +- dspace-services/pom.xml | 2 +- dspace-sword/pom.xml | 2 +- dspace-swordv2/pom.xml | 2 +- dspace/modules/additions/pom.xml | 2 +- dspace/modules/pom.xml | 2 +- dspace/modules/rest/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- dspace/pom.xml | 2 +- pom.xml | 32 ++++++++++++++++---------------- 15 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index d41c51638c3a..d0d2bf8608ae 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index 7e26e22fa2ea..8ffa26e96751 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 .. 
diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 8692482d7889..8b6a3ee41486 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index 95354621aa13..80c0614781c9 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index 7fdf21ef4ce6..dcc47b175369 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - 7.6-SNAPSHOT + 7.6 DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index 99aa88bebf0c..6f4b2871df9c 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index f3112b049bdd..488fa05c9633 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index fd68337bbc3a..02f293dbbb9d 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 9badeb2fe80a..3bc838d3ea22 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index dd98bf0cbdd1..100561849da1 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index b60246ba6cc9..c12b13a38df9 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 ../../pom.xml diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index 115393b7db20..4f3e7cc1c6b2 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index 41ddb94be5a9..c3921bfa2704 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -13,7 +13,7 @@ just adding new jar in the classloader modules org.dspace - 7.6-SNAPSHOT + 7.6 .. diff --git a/dspace/pom.xml b/dspace/pom.xml index 7916648e4784..75f16a420597 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -16,7 +16,7 @@ org.dspace dspace-parent - 7.6-SNAPSHOT + 7.6 ../pom.xml diff --git a/pom.xml b/pom.xml index f8e873c8d4d5..432a928b5b96 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - 7.6-SNAPSHOT + 7.6 DSpace Parent Project DSpace open source software is a turnkey institutional repository application. 
@@ -872,14 +872,14 @@ org.dspace dspace-rest - 7.6-SNAPSHOT + 7.6 jar classes org.dspace dspace-rest - 7.6-SNAPSHOT + 7.6 war @@ -1030,69 +1030,69 @@ org.dspace dspace-api - 7.6-SNAPSHOT + 7.6 org.dspace dspace-api test-jar - 7.6-SNAPSHOT + 7.6 test org.dspace.modules additions - 7.6-SNAPSHOT + 7.6 org.dspace dspace-sword - 7.6-SNAPSHOT + 7.6 org.dspace dspace-swordv2 - 7.6-SNAPSHOT + 7.6 org.dspace dspace-oai - 7.6-SNAPSHOT + 7.6 org.dspace dspace-services - 7.6-SNAPSHOT + 7.6 org.dspace dspace-server-webapp test-jar - 7.6-SNAPSHOT + 7.6 test org.dspace dspace-rdf - 7.6-SNAPSHOT + 7.6 org.dspace dspace-iiif - 7.6-SNAPSHOT + 7.6 org.dspace dspace-server-webapp - 7.6-SNAPSHOT + 7.6 jar classes org.dspace dspace-server-webapp - 7.6-SNAPSHOT + 7.6 war @@ -1927,7 +1927,7 @@ scm:git:git@github.com:DSpace/DSpace.git scm:git:git@github.com:DSpace/DSpace.git git@github.com:DSpace/DSpace.git - HEAD + dspace-7.6 From a533704a27ed97f16125590c4569589991119356 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 23 Jun 2023 12:10:56 -0500 Subject: [PATCH 315/686] [maven-release-plugin] prepare for next development iteration --- dspace-api/pom.xml | 2 +- dspace-iiif/pom.xml | 2 +- dspace-oai/pom.xml | 2 +- dspace-rdf/pom.xml | 2 +- dspace-rest/pom.xml | 4 ++-- dspace-server-webapp/pom.xml | 2 +- dspace-services/pom.xml | 2 +- dspace-sword/pom.xml | 2 +- dspace-swordv2/pom.xml | 2 +- dspace/modules/additions/pom.xml | 2 +- dspace/modules/pom.xml | 2 +- dspace/modules/rest/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- dspace/pom.xml | 2 +- pom.xml | 32 ++++++++++++++++---------------- 15 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index d0d2bf8608ae..0e70fc52e085 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index 8ffa26e96751..30e20489b82c 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 8b6a3ee41486..08e732d45733 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index 80c0614781c9..09c3e704de42 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index dcc47b175369..320567f3c104 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - 7.6 + 7.6.1-SNAPSHOT DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index 6f4b2871df9c..2221f9ca0e62 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT .. 
diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 488fa05c9633..fe7c6ab8b69d 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index 02f293dbbb9d..777041775d46 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 3bc838d3ea22..c3fbdecb79d4 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 100561849da1..dd6dbcb1ffd7 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index c12b13a38df9..e66622ce6b34 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT ../../pom.xml diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index 4f3e7cc1c6b2..14c6ff93f761 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index c3921bfa2704..bebb6d183d7e 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -13,7 +13,7 @@ just adding new jar in the classloader modules org.dspace - 7.6 + 7.6.1-SNAPSHOT .. diff --git a/dspace/pom.xml b/dspace/pom.xml index 75f16a420597..bfa21f5d664f 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -16,7 +16,7 @@ org.dspace dspace-parent - 7.6 + 7.6.1-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index 432a928b5b96..4c9799243f8c 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - 7.6 + 7.6.1-SNAPSHOT DSpace Parent Project DSpace open source software is a turnkey institutional repository application. 
@@ -872,14 +872,14 @@ org.dspace dspace-rest - 7.6 + 7.6.1-SNAPSHOT jar classes org.dspace dspace-rest - 7.6 + 7.6.1-SNAPSHOT war @@ -1030,69 +1030,69 @@ org.dspace dspace-api - 7.6 + 7.6.1-SNAPSHOT org.dspace dspace-api test-jar - 7.6 + 7.6.1-SNAPSHOT test org.dspace.modules additions - 7.6 + 7.6.1-SNAPSHOT org.dspace dspace-sword - 7.6 + 7.6.1-SNAPSHOT org.dspace dspace-swordv2 - 7.6 + 7.6.1-SNAPSHOT org.dspace dspace-oai - 7.6 + 7.6.1-SNAPSHOT org.dspace dspace-services - 7.6 + 7.6.1-SNAPSHOT org.dspace dspace-server-webapp test-jar - 7.6 + 7.6.1-SNAPSHOT test org.dspace dspace-rdf - 7.6 + 7.6.1-SNAPSHOT org.dspace dspace-iiif - 7.6 + 7.6.1-SNAPSHOT org.dspace dspace-server-webapp - 7.6 + 7.6.1-SNAPSHOT jar classes org.dspace dspace-server-webapp - 7.6 + 7.6.1-SNAPSHOT war @@ -1927,7 +1927,7 @@ scm:git:git@github.com:DSpace/DSpace.git scm:git:git@github.com:DSpace/DSpace.git git@github.com:DSpace/DSpace.git - dspace-7.6 + HEAD From 2b5c4a5f4a80fb8a311437676bc351dc75d257f0 Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Thu, 29 Jun 2023 13:00:59 +0200 Subject: [PATCH 316/686] Improve performance of automatic discovery re-index after database changes --- .../src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java | 1 + 1 file changed, 1 insertion(+) diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java index 89010a73087f..0732eea2a0b9 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java @@ -1465,6 +1465,7 @@ public void run() { Context context = null; try { context = new Context(); + context.setMode(Context.Mode.READ_ONLY); context.turnOffAuthorisationSystem(); log.info( "Post database migration, reindexing all content in Discovery search and browse engine"); From af55090245baf3313407ae1c103b2db53b97b9d4 Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Thu, 29 Jun 2023 14:31:39 +0200 Subject: [PATCH 317/686] Ignore vocabulary indexes without discovery facet to avoid NPE --- .../content/authority/ChoiceAuthorityServiceImpl.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index ec8f8769be52..4cac1da31490 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -585,6 +585,12 @@ public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) { break; } } + + // If there is no matching facet, return null to ignore this vocabulary index + if (matchingFacet == null) { + return null; + } + DSpaceControlledVocabularyIndex vocabularyIndex = new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields, matchingFacet); From ea4565bd6016d795b2cb89834c65acc8e0e0c977 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 30 Jun 2023 14:37:06 -0500 Subject: [PATCH 318/686] [maven-release-plugin] prepare branch dspace-7_x --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 4c9799243f8c..3873fa372c66 100644 --- a/pom.xml +++ b/pom.xml @@ -1927,7 +1927,7 @@ scm:git:git@github.com:DSpace/DSpace.git scm:git:git@github.com:DSpace/DSpace.git git@github.com:DSpace/DSpace.git - HEAD + dspace-7_x From 
1b94aecb26059b35b762ff5285beecddd2e0d08f Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Thu, 6 Jul 2023 16:48:10 +0200 Subject: [PATCH 319/686] 103837: Only send GA4 events for ORIGINAL bitstreams + ITs --- .../google/GoogleAsyncEventListener.java | 32 +++++- .../google/GoogleAsyncEventListenerIT.java | 105 +++++++++++++++++- dspace/config/dspace.cfg | 7 +- 3 files changed, 133 insertions(+), 11 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java index c169e4712f7f..e84d9f8591f2 100644 --- a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java +++ b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java @@ -22,6 +22,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.content.Bitstream; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Constants; import org.dspace.core.Context; @@ -77,7 +78,7 @@ public void receiveEvent(Event event) { UsageEvent usageEvent = (UsageEvent) event; LOGGER.debug("Usage event received " + event.getName()); - if (isNotBitstreamViewEvent(usageEvent)) { + if (!isContentBitstream(usageEvent)) { return; } @@ -171,9 +172,32 @@ private String getDocumentPath(HttpServletRequest request) { return documentPath; } - private boolean isNotBitstreamViewEvent(UsageEvent usageEvent) { - return usageEvent.getAction() != UsageEvent.Action.VIEW - || usageEvent.getObject().getType() != Constants.BITSTREAM; + /** + * Verifies if the usage event is a content bitstream view event, by checking if:
+ * <ul>
+ *   <li>the usage event is a view event</li>
+ *   <li>the object of the usage event is a bitstream</li>
+ *   <li>the bitstream belongs to the ORIGINAL bundle</li>
+ * </ul>
+ * This last one can be skipped if 'google-analytics.exclude-non-content-bitstreams' is set to false. + * This will make it so the bundle name is completely ignored when sending events. + */ + private boolean isContentBitstream(UsageEvent usageEvent) { + // check if event is a VIEW event and object is a Bitstream + if (usageEvent.getAction() == UsageEvent.Action.VIEW + || usageEvent.getObject().getType() == Constants.BITSTREAM) { + // check if config is set to true + if (configurationService.getBooleanProperty("google-analytics.exclude-non-content-bitstreams")) { + try { + // check if bitstream belongs to the ORIGINAL bundle + return ((Bitstream) usageEvent.getObject()) + .getBundles().stream() + .anyMatch(bundle -> bundle.getName().equals(Constants.CONTENT_BUNDLE_NAME)); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + return true; + } + return false; } private boolean isGoogleAnalyticsKeyNotConfigured() { diff --git a/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java b/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java index 866d0fafedb3..e43e9fd82035 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java @@ -29,12 +29,16 @@ import java.util.List; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Item; +import org.dspace.core.Constants; import org.dspace.google.client.GoogleAnalyticsClient; import org.dspace.services.ConfigurationService; import org.junit.After; @@ -61,6 +65,8 @@ public class GoogleAsyncEventListenerIT extends AbstractControllerIntegrationTes private Bitstream bitstream; + private Item item; + private List originalGoogleAnalyticsClients; private GoogleAnalyticsClient firstGaClientMock = mock(GoogleAnalyticsClient.class); @@ -80,7 +86,7 @@ public void setup() throws Exception { .withName("Test collection") .build(); - Item item = ItemBuilder.createItem(context, collection) + item = ItemBuilder.createItem(context, collection) .withTitle("Test item") .build(); @@ -238,6 +244,88 @@ public void testOnBitstreamContentDownloadWithTooManyEvents() throws Exception { } + @Test + public void testOnBitstreamContentDownloadExcludeNonContentBitstreams() throws Exception { + configurationService.setProperty("google-analytics.exclude-non-content-bitstreams", true); + + context.turnOffAuthorisationSystem(); + Bundle licenseBundle = BundleBuilder.createBundle(context, item) + .withName(Constants.LICENSE_BUNDLE_NAME).build(); + Bitstream license = BitstreamBuilder.createBitstream(context, licenseBundle, + toInputStream("License", defaultCharset())).build(); + Bundle thumbnailBundle = BundleBuilder.createBundle(context, item).withName("THUMBNAIL").build(); + Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, + toInputStream("Thumbnail", defaultCharset())).build(); + context.restoreAuthSystemState(); + + assertThat(getStoredEventsAsList(), empty()); + + String bitstreamUrl = "/api/core/bitstreams/" + bitstream.getID() + "/content"; + + 
downloadBitstreamContent("Postman", "123456", "REF"); + downloadContent("Chrome", "ABCDEFG", "REF-1", license); + downloadContent("Chrome", "987654", "REF-2", thumbnail); + + assertThat(getStoredEventsAsList(), hasSize(1)); + + List storedEvents = getStoredEventsAsList(); + + assertThat(storedEvents, contains( + event("123456", "127.0.0.1", "Postman", "REF", bitstreamUrl, "Test item"))); + + googleAsyncEventListener.sendCollectedEvents(); + + assertThat(getStoredEventsAsList(), empty()); + + verify(firstGaClientMock).isAnalyticsKeySupported(ANALYTICS_KEY); + verify(secondGaClientMock).isAnalyticsKeySupported(ANALYTICS_KEY); + verify(secondGaClientMock).sendEvents(ANALYTICS_KEY, storedEvents); + verifyNoMoreInteractions(firstGaClientMock, secondGaClientMock); + } + + @Test + public void testOnBitstreamContentDownloadIncludeNonContentBitstreams() throws Exception { + configurationService.setProperty("google-analytics.exclude-non-content-bitstreams", false); + + context.turnOffAuthorisationSystem(); + Bundle licenseBundle = BundleBuilder.createBundle(context, item) + .withName(Constants.LICENSE_BUNDLE_NAME).build(); + Bitstream license = BitstreamBuilder.createBitstream(context, licenseBundle, + toInputStream("License", defaultCharset())).build(); + Bundle thumbnailBundle = BundleBuilder.createBundle(context, item).withName("THUMBNAIL").build(); + Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, + toInputStream("Thumbnail", defaultCharset())).build(); + context.restoreAuthSystemState(); + + assertThat(getStoredEventsAsList(), empty()); + + String bitstreamUrl = "/api/core/bitstreams/" + bitstream.getID() + "/content"; + String licenseUrl = "/api/core/bitstreams/" + license.getID() + "/content"; + String thumbnailUrl = "/api/core/bitstreams/" + thumbnail.getID() + "/content"; + + downloadBitstreamContent("Postman", "123456", "REF"); + downloadContent("Chrome", "ABCDEFG", "REF-1", license); + downloadContent("Chrome", "987654", "REF-2", thumbnail); + + assertThat(getStoredEventsAsList(), hasSize(3)); + + List storedEvents = getStoredEventsAsList(); + + assertThat(storedEvents, contains( + event("123456", "127.0.0.1", "Postman", "REF", bitstreamUrl, "Test item"), + event("ABCDEFG", "127.0.0.1", "Chrome", "REF-1", licenseUrl, "Test item"), + event("987654", "127.0.0.1", "Chrome", "REF-2", thumbnailUrl, "Test item"))); + + googleAsyncEventListener.sendCollectedEvents(); + + assertThat(getStoredEventsAsList(), empty()); + + verify(firstGaClientMock).isAnalyticsKeySupported(ANALYTICS_KEY); + verify(secondGaClientMock).isAnalyticsKeySupported(ANALYTICS_KEY); + verify(secondGaClientMock).sendEvents(ANALYTICS_KEY, storedEvents); + verifyNoMoreInteractions(firstGaClientMock, secondGaClientMock); + } + @SuppressWarnings("unchecked") private List getStoredEventsAsList() { List events = new ArrayList<>(); @@ -248,13 +336,18 @@ private List getStoredEventsAsList() { return events; } - private void downloadBitstreamContent(String userAgent, String correlationId, String referrer) throws Exception { + private void downloadContent(String userAgent, String correlationId, String referrer, Bitstream bit) + throws Exception { getClient(getAuthToken(admin.getEmail(), password)) - .perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content") - .header("USER-AGENT", userAgent) - .header("X-CORRELATION-ID", correlationId) - .header("X-REFERRER", referrer)) + .perform(get("/api/core/bitstreams/" + bit.getID() + "/content") + .header("USER-AGENT", userAgent) + 
.header("X-CORRELATION-ID", correlationId) + .header("X-REFERRER", referrer)) .andExpect(status().isOk()); } + private void downloadBitstreamContent(String userAgent, String correlationId, String referrer) throws Exception { + downloadContent(userAgent, correlationId, referrer, bitstream); + } + } diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 8e532310c11b..89c8da92558c 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1535,7 +1535,12 @@ log.report.dir = ${dspace.dir}/log # Defines a Measurement Protocol API Secret to be used to track interactions which occur outside of the user's browser. # For example , this is required to track downloads of bitstreams. This setting is only used by Google Analytics 4. # For more details see https://developers.google.com/analytics/devguides/collection/protocol/ga4 -# google.analytics.api-secret = +# google.analytics.api-secret = + +# Ensures only views of bitstreams in the 'ORIGINAL' bundle result in a GA4 event. +# Setting this to false may cause inflated bitstream view numbers, since requesting +# bitstreams in the 'THUMBNAIL' and 'LICENSE' bundles, will also result in GA4 events. +google-analytics.exclude-non-content-bitstreams=true #################################################################### #---------------------------------------------------------------# From e55bc87c1aee8806befcd9dede575b258dfdfa9c Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Mon, 10 Jul 2023 11:29:33 +0200 Subject: [PATCH 320/686] Add a configuration key to disable hierarchical browse indexes --- .../content/authority/ChoiceAuthorityServiceImpl.java | 10 ++++++++++ dspace/config/dspace.cfg | 5 +++++ 2 files changed, 15 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index 4cac1da31490..f2bc4f0be0f5 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -17,6 +17,7 @@ import java.util.Set; import java.util.stream.Collectors; +import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.app.util.DCInput; @@ -557,6 +558,15 @@ public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) { init(); ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab); if (source != null && source instanceof DSpaceControlledVocabulary) { + + // First, check if this vocabulary index is disabled + String[] vocabulariesDisabled = configurationService + .getArrayProperty("webui.browse.vocabularies.disabled"); + if (vocabulariesDisabled != null && ArrayUtils.contains(vocabulariesDisabled, nameVocab)) { + // Discard this vocabulary browse index + return null; + } + Set metadataFields = new HashSet<>(); Map> formsToFields = this.authoritiesFormDefinitions.get(nameVocab); for (Map.Entry> formToField : formsToFields.entrySet()) { diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index cafd37931fd4..61d6fb589a60 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1158,6 +1158,11 @@ webui.browse.index.4 = subject:metadata:dc.subject.*:text ## example of authority-controlled browse category - see authority control config #webui.browse.index.5 = lcAuthor:metadataAuthority:dc.contributor.author:authority +# By 
default, browse hierarchical indexes are created based on the used controlled +# vocabularies in the submission forms. These could be disabled adding the name of +# the vocabularies to exclude in this comma-separated property: +# webui.browse.vocabularies.disabled = srsc + # Enable/Disable tag cloud in browsing. # webui.browse.index.tagcloud. = true | false # where n is the index number from the above options From 61f50393c2062bf40f3760ba71b9bb24226402cb Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 11 Jul 2023 15:49:04 +0200 Subject: [PATCH 321/686] [DSC-776] Restored mutliple entries import for bibtex files --- ...BibtexImportMetadataSourceServiceImpl.java | 5 + .../external/service/ImportService.java | 89 +++++++++++--- .../AbstractPlainMetadataSource.java | 5 +- .../service/components/FileSource.java | 11 +- .../WorkspaceItemRestRepository.java | 45 +++---- .../rest/WorkspaceItemRestRepositoryIT.java | 110 +++++++++++++++--- 6 files changed, 210 insertions(+), 55 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java index 0014088c8650..4b6a5aa92e72 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java @@ -117,4 +117,9 @@ public void setMetadataFieldMap(@SuppressWarnings("rawtypes") Map metadataFieldM super.setMetadataFieldMap(metadataFieldMap); } + @Override + public boolean canImportMultipleRecords() { + return true; + } + } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java b/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java index 28df30b345bc..a444a3609c15 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java @@ -311,7 +311,7 @@ public boolean canImportFromFile(String originalName) { /* * Get a collection of record from File, * The first match will be return. - * + * * @param file The file from which will read records * @param originalName The original file name or full path * @return a single record contains the metadatum @@ -319,28 +319,83 @@ public boolean canImportFromFile(String originalName) { */ public ImportRecord getRecord(File file, String originalName) throws FileMultipleOccurencesException, FileSourceException { - ImportRecord importRecords = null; - for (MetadataSource metadataSource : importSources.values()) { - try (InputStream fileInputStream = new FileInputStream(file)) { - if (metadataSource instanceof FileSource) { - FileSource fileSource = (FileSource)metadataSource; - if (fileSource.isValidSourceForFile(originalName)) { - importRecords = fileSource.getRecord(fileInputStream); - break; + try (InputStream fileInputStream = new FileInputStream(file)) { + FileSource fileSource = this.getFileSource(fileInputStream, originalName); + try { + if (fileSource.isValidSourceForFile(originalName)) { + return fileSource.getRecord(fileInputStream); + } + } catch (FileSourceException e) { + log.debug(fileSource.getImportSource() + " isn't a valid parser for file", e); + } + //catch statements is required because we could have supported format (i.e. 
XML) + //which fail on schema validation + } catch (FileMultipleOccurencesException e) { + log.debug("File contains multiple metadata, return with error"); + throw e; + } catch (IOException e1) { + throw new FileSourceException("File cannot be read, may be null"); + } + return null; + } + + /** + * Get a collection of record from File, + * + * @param file The file from which will read records + * @param originalName The original file name or full path + * @return records containing metdatum + * @throws FileMultipleOccurencesException if the import configured for the {@code file} + * doesn't allow multiple records import. + * @throws FileSourceException if the file cannot be read. + */ + public List getRecords(File file, String originalName) + throws FileMultipleOccurencesException, FileSourceException { + try (InputStream fileInputStream = new FileInputStream(file)) { + FileSource fileSource = this.getFileSource(fileInputStream, originalName); + try { + if (fileSource.isValidSourceForFile(originalName)) { + List records = fileSource.getRecords(fileInputStream); + if (!fileSource.canImportMultipleRecords() && records.size() > 1) { + throw new FileMultipleOccurencesException( + "Found " + records.size() + " entries in file ( " + + originalName + + " ) but import source ( " + + fileSource.getImportSource() + + " ) not allowed to import multiple records" + ); } + return records; } + } catch (FileSourceException e) { + log.debug(fileSource.getImportSource() + " isn't a valid parser for file", e); + } //catch statements is required because we could have supported format (i.e. XML) //which fail on schema validation - } catch (FileSourceException e) { - log.debug(metadataSource.getImportSource() + " isn't a valid parser for file"); - } catch (FileMultipleOccurencesException e) { - log.debug("File contains multiple metadata, return with error"); - throw e; - } catch (IOException e1) { - throw new FileSourceException("File cannot be read, may be null"); + } catch (IOException e1) { + throw new FileSourceException("File cannot be read, may be null"); + } + return null; + } + + protected FileSource getFileSource(File file, String originalName) throws FileSourceException { + try (InputStream fileInputStream = new FileInputStream(file)) { + return getFileSource(file, originalName); + } catch (IOException e1) { + throw new FileSourceException("File cannot be read, may be null"); + } + } + + protected FileSource getFileSource(InputStream fileInputStream, String originalName) { + for (MetadataSource metadataSource : importSources.values()) { + if (metadataSource instanceof FileSource) { + FileSource fileSource = (FileSource)metadataSource; + if (fileSource.isValidSourceForFile(originalName)) { + return fileSource; + } } } - return importRecords; + return null; } /** diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java index 5d83b9a7cce4..b58f69b6665c 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java @@ -41,7 +41,7 @@ public abstract class AbstractPlainMetadataSource /** * Set the file extensions supported by this metadata service - * + * * @param supportedExtensions the file extensions (xml,txt,...) 
supported by this service */ public void setSupportedExtensions(List supportedExtensions) { @@ -64,6 +64,9 @@ public List getSupportedExtensions() { @Override public List getRecords(InputStream is) throws FileSourceException { List datas = readData(is); + if (datas == null) { + return List.of(); + } List records = new ArrayList<>(); for (PlainMetadataSourceDto item : datas) { records.add(toRecord(item)); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java index 801f5474bb4e..fffd476a69ee 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java @@ -52,7 +52,7 @@ public ImportRecord getRecord(InputStream inputStream) /** * This method is used to decide if the FileSource manage the file format - * + * * @param originalName the file file original name * @return true if the FileSource can parse the file, false otherwise */ @@ -67,4 +67,13 @@ public default boolean isValidSourceForFile(String originalName) { return false; } + /** + * This method is used to determine if we can import multiple records at once placed in the same source file. + * + * @return true if allowed to import multiple records in the same file, false otherwise + */ + public default boolean canImportMultipleRecords() { + return false; + } + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkspaceItemRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkspaceItemRestRepository.java index 770087d0b54b..1f1582596277 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkspaceItemRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkspaceItemRestRepository.java @@ -317,9 +317,9 @@ public Iterable upload(Context context, HttpServletRequest re for (MultipartFile mpFile : uploadfiles) { File file = Utils.getFile(mpFile, "upload-loader", "filedataloader"); try { - ImportRecord record = importService.getRecord(file, mpFile.getOriginalFilename()); - if (record != null) { - records.add(record); + List recordsFound = importService.getRecords(file, mpFile.getOriginalFilename()); + if (recordsFound != null && !recordsFound.isEmpty()) { + records.addAll(recordsFound); break; } } catch (Exception e) { @@ -334,11 +334,15 @@ public Iterable upload(Context context, HttpServletRequest re } catch (Exception e) { log.error("Error importing metadata", e); } - WorkspaceItem source = submissionService. - createWorkspaceItem(context, getRequestService().getCurrentRequest()); - merge(context, records, source); - result = new ArrayList<>(); - result.add(source); + result = new ArrayList<>(records.size()); + for (ImportRecord importRecord : records) { + WorkspaceItem source = submissionService. + createWorkspaceItem(context, getRequestService().getCurrentRequest()); + + merge(context, importRecord, source); + + result.add(source); + } //perform upload of bitstream if there is exact one result and convert workspaceitem to entity rest if (!result.isEmpty()) { @@ -348,18 +352,17 @@ public Iterable upload(Context context, HttpServletRequest re //load bitstream into bundle ORIGINAL only if there is one result (approximately this is the // right behaviour for pdf file but not for other bibliographic format e.g. 
bibtex) if (result.size() == 1) { + ClassLoader loader = this.getClass().getClassLoader(); for (int i = 0; i < submissionConfig.getNumberOfSteps(); i++) { SubmissionStepConfig stepConfig = submissionConfig.getStep(i); - ClassLoader loader = this.getClass().getClassLoader(); - Class stepClass; try { - stepClass = loader.loadClass(stepConfig.getProcessingClassName()); - Object stepInstance = stepClass.newInstance(); + Class stepClass = loader.loadClass(stepConfig.getProcessingClassName()); + Object stepInstance = stepClass.getConstructor().newInstance(); if (UploadableStep.class.isAssignableFrom(stepClass)) { UploadableStep uploadableStep = (UploadableStep) stepInstance; for (MultipartFile mpFile : uploadfiles) { - ErrorRest err = uploadableStep.upload(context, - submissionService, stepConfig, wi, mpFile); + ErrorRest err = + uploadableStep.upload(context, submissionService, stepConfig, wi, mpFile); if (err != null) { errors.add(err); } @@ -449,7 +452,7 @@ private BaseObjectRest findItemRestById(Context context, String itemId) throw return authorizationRestUtil.getObject(context, objectId); } - private void merge(Context context, List records, WorkspaceItem item) throws SQLException { + private void merge(Context context, ImportRecord record, WorkspaceItem item) throws SQLException { for (MetadataValue metadataValue : itemService.getMetadata( item.getItem(), Item.ANY, Item.ANY, Item.ANY, Item.ANY)) { itemService.clearMetadata(context, item.getItem(), @@ -458,13 +461,11 @@ private void merge(Context context, List records, WorkspaceItem it metadataValue.getMetadataField().getQualifier(), metadataValue.getLanguage()); } - for (ImportRecord record : records) { - if (record != null && record.getValueList() != null) { - for (MetadatumDTO metadataValue : record.getValueList()) { - itemService.addMetadata(context, item.getItem(), metadataValue.getSchema(), - metadataValue.getElement(), metadataValue.getQualifier(), null, - metadataValue.getValue()); - } + if (record != null && record.getValueList() != null) { + for (MetadatumDTO metadataValue : record.getValueList()) { + itemService.addMetadata(context, item.getItem(), metadataValue.getSchema(), + metadataValue.getElement(), metadataValue.getQualifier(), null, + metadataValue.getValue()); } } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java index c7725805687d..d9d2c0fcf708 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java @@ -60,9 +60,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.jayway.jsonpath.matchers.JsonPathMatchers; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateUtils; +import org.apache.commons.text.StringEscapeUtils; import org.dspace.app.rest.matcher.CollectionMatcher; import org.dspace.app.rest.matcher.ItemMatcher; import org.dspace.app.rest.matcher.MetadataMatcher; @@ -2027,27 +2027,109 @@ public void createSingleWorkspaceItemsFromSingleFileWithMultipleEntriesTest() th Collection col1 = CollectionBuilder.createCollection(context, child1) .withName("Collection 1") .withSubmitterGroup(eperson) + .withEntityType("Publication") + .withSubmissionDefinition("traditional") .build(); Collection col2 = 
CollectionBuilder.createCollection(context, child1) .withName("Collection 2") .withSubmitterGroup(eperson) + .withEntityType("Publication") + .withSubmissionDefinition("traditional") .build(); - InputStream bibtex = getClass().getResourceAsStream("bibtex-test-3-entries.bib"); - final MockMultipartFile bibtexFile = new MockMultipartFile("file", "bibtex-test-3-entries.bib", - "application/x-bibtex", - bibtex); + try (InputStream bibtex = getClass().getResourceAsStream("bibtex-test-3-entries.bib")) { + final MockMultipartFile bibtexFile = + new MockMultipartFile( + "file", "bibtex-test-3-entries.bib", + "application/x-bibtex", bibtex + ); - context.restoreAuthSystemState(); + context.restoreAuthSystemState(); - String authToken = getAuthToken(eperson.getEmail(), password); - // create a workspaceitem from a single bibliographic entry file explicitly in the default collection (col1) - getClient(authToken).perform(multipart("/api/submission/workspaceitems") - .file(bibtexFile)) - // create should return return a 422 because we don't allow/support bibliographic files - // that have multiple metadata records - .andExpect(status().is(422)); - bibtex.close(); + String authToken = getAuthToken(eperson.getEmail(), password); + // create a workspaceitem from a single bibliographic entry file explicitly in the default collection (col1) + getClient(authToken) + .perform( + multipart("/api/submission/workspaceitems").file(bibtexFile) + ) + // bulk create should return 200, 201 (created) is better for single resource + .andExpect(status().isOk()) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value", + is("My Article") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[0]._embedded.collection.id", + is(col1.getID().toString()) + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[1].sections.traditionalpageone['dc.title'][0].value", + is("My Article 2") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[1]._embedded.collection.id", + is(col1.getID().toString()) + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[2].sections.traditionalpageone['dc.title'][0].value", + is("My Article 3") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[2]._embedded.collection.id", + is(col1.getID().toString()) + ) + ) + .andExpect( + jsonPath("$._embedded.workspaceitems[*]._embedded.upload").doesNotExist()); + getClient(authToken) + .perform( + multipart("/api/submission/workspaceitems") + .file(bibtexFile) + .param("owningCollection", col2.getID().toString()) + ) + .andExpect(status().isOk()) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value", + is("My Article") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[0]._embedded.collection.id", + is(col2.getID().toString()) + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[1].sections.traditionalpageone['dc.title'][0].value", + is("My Article 2") + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[1]._embedded.collection.id", + is(col2.getID().toString()) + ) + ) + .andExpect( + jsonPath( + "$._embedded.workspaceitems[2].sections.traditionalpageone['dc.title'][0].value", + is("My Article 3") + ) + ); + } } @Test From 334a02c2be2d69f5c07f748d765dbf9364e9580f Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Tue, 27 Jun 2023 17:01:32 -0400 Subject: [PATCH 322/686] More and better(?) documentation. 
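A rough caller-side sketch of the multi-record import added above. It is illustrative only: the file location, the method name and the plain printout are assumptions, while ImportService.getRecords(File, String), ImportRecord, MetadatumDTO and the two exceptions are the ones used in the changes above; the exceptions are declared rather than handled to keep the fragment short.

    // Assumes the org.dspace.importer.external.* imports used in the patch above.
    private void printBibtexEntries(ImportService importService, File bibtexFile)
            throws FileSourceException, FileMultipleOccurencesException {
        // One ImportRecord is returned per BibTeX entry; the REST repository above
        // turns each of them into its own WorkspaceItem.
        List<ImportRecord> records = importService.getRecords(bibtexFile, bibtexFile.getName());
        if (records == null) {
            return; // no registered FileSource recognised this file name/extension
        }
        for (ImportRecord record : records) {
            for (MetadatumDTO metadatum : record.getValueList()) {
                System.out.printf("%s.%s.%s = %s%n", metadatum.getSchema(),
                        metadatum.getElement(), metadatum.getQualifier(), metadatum.getValue());
            }
        }
    }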
--- .../src/main/java/org/dspace/core/Email.java | 116 +++++++++++++----- 1 file changed, 84 insertions(+), 32 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/Email.java b/dspace-api/src/main/java/org/dspace/core/Email.java index 998d934c9558..a95407876e11 100644 --- a/dspace-api/src/main/java/org/dspace/core/Email.java +++ b/dspace-api/src/main/java/org/dspace/core/Email.java @@ -57,26 +57,40 @@ import org.dspace.services.factory.DSpaceServicesFactory; /** - * Class representing an e-mail message, also used to send e-mails. + * Class representing an e-mail message. The {@link send} method causes the + * assembled message to be formatted and sent. *

 * Typical use:
- *

+ * <pre>
+ * Email email = Email.getEmail(path);
+ * email.addRecipient("foo@bar.com");
+ * email.addArgument("John");
+ * email.addArgument("On the Testing of DSpace");
+ * email.send();
+ * </pre>
+ * {@code path} is the filesystem path of an email template, typically in
+ * {@code ${dspace.dir}/config/emails/} and can include the subject -- see
+ * below. Templates are processed by
+ * Apache Velocity. They may contain VTL directives and property
+ * placeholders.
 *

+ * {@link addArgument(string)} adds a property to the {@code params} array
+ * in the Velocity context, which can be used to replace placeholder tokens
+ * in the message. These arguments are indexed by number in the order they were
+ * added to the message.
 *

- * Email email = new Email();
- * email.addRecipient("foo@bar.com");
- * email.addArgument("John");
- * email.addArgument("On the Testing of DSpace");
- * email.send();
- *

+ * The DSpace configuration properties are also available to templates as the
+ * array {@code config}, indexed by name. Example: {@code ${config.get('dspace.name')}}
 *

- * name is the name of an email template in
- * dspace-dir/config/emails/ (which also includes the subject.)
- * arg0 and arg1 are arguments to fill out the
- * message with.
- *

- * Emails are formatted using Apache Velocity. Headers such as Subject may be
- * supplied by the template, by defining them using #set(). Example:
- *

+ * Recipients and attachments may be added as needed. See {@link addRecipient},
+ * {@link addAttachment(File, String)}, and
+ * {@link addAttachment(InputStream, String, String)}.
 *
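A brief illustrative use of the two attachment helpers named above, assuming an already built Email instance named email; the file name, byte content and MIME type are invented for the example.

    // Attach a file from disk, then a generated byte stream.
    email.addAttachment(new File("/tmp/receipt.pdf"), "receipt.pdf");
    byte[] reportBytes = "col1,col2\n1,2\n".getBytes(StandardCharsets.UTF_8);
    email.addAttachment(new ByteArrayInputStream(reportBytes), "report.csv", "text/csv");
    // Passing a null MIME type falls back to "application/octet-stream",
    // per the DEFAULT_ATTACHMENT_TYPE handling later in this class.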

+ * Headers such as Subject may be supplied by the template, by defining them
+ * using the VTL directive {@code #set()}. Only headers named in the DSpace
+ * configuration array property {@code mail.message.headers} will be added.
 *

+ * Example:
 *
 *

  *
@@ -91,12 +105,14 @@
  *
  *     Thank you for sending us your submission "${params[1]}".
  *
+ *     --
+ *     The ${config.get('dspace.name')} Team
+ *
  * 
* *

 * If the example code above was used to send this mail, the resulting mail
 * would have the subject Example e-mail and the body would be:
- *

* *
  *
@@ -105,7 +121,16 @@
  *
  *     Thank you for sending us your submission "On the Testing of DSpace".
  *
+ *     --
+ *     The DSpace Team
+ *
  * 
+ *

+ * There are two ways to load a message body. One can create an instance of + * {@link Email} and call {@link setContent} on it, passing the body as a String. Or + * one can use the static factory method {@link getEmail} to load a file by its + * complete filesystem path. In either case the text will be loaded into a + * Velocity template. * * @author Robert Tansley * @author Jim Downing - added attachment handling code @@ -182,7 +207,7 @@ public Email() { } /** - * Add a recipient + * Add a recipient. * * @param email the recipient's email address */ @@ -205,7 +230,7 @@ public void setContent(String name, String cnt) { } /** - * Set the subject of the message + * Set the subject of the message. * * @param s the subject of the message */ @@ -214,7 +239,7 @@ public void setSubject(String s) { } /** - * Set the reply-to email address + * Set the reply-to email address. * * @param email the reply-to email address */ @@ -223,7 +248,7 @@ public void setReplyTo(String email) { } /** - * Fill out the next argument in the template + * Fill out the next argument in the template. * * @param arg the value for the next argument */ @@ -231,6 +256,13 @@ public void addArgument(Object arg) { arguments.add(arg); } + /** + * Add an attachment bodypart to the message from an external file. + * + * @param f reference to a file to be attached. + * @param name a name for the resulting bodypart in the message's MIME + * structure. + */ public void addAttachment(File f, String name) { attachments.add(new FileAttachment(f, name)); } @@ -238,6 +270,17 @@ public void addAttachment(File f, String name) { /** When given a bad MIME type for an attachment, use this instead. */ private static final String DEFAULT_ATTACHMENT_TYPE = "application/octet-stream"; + /** + * Add an attachment bodypart to the message from a byte stream. + * + * @param is the content of this stream will become the content of the + * bodypart. + * @param name a name for the resulting bodypart in the message's MIME + * structure. + * @param mimetype the MIME type of the resulting bodypart, such as + * "text/pdf". If {@code null} it will default to + * "application/octet-stream", which is MIME for "unknown format". + */ public void addAttachment(InputStream is, String name, String mimetype) { if (null == mimetype) { LOG.error("Null MIME type replaced with '" + DEFAULT_ATTACHMENT_TYPE @@ -257,6 +300,11 @@ public void addAttachment(InputStream is, String name, String mimetype) { moreAttachments.add(new InputStreamAttachment(is, name, mimetype)); } + /** + * Set the character set of the message. + * + * @param cs the name of a character set, such as "UTF-8" or "EUC-JP". + */ public void setCharset(String cs) { charset = cs; } @@ -447,6 +495,9 @@ public void send() throws MessagingException, IOException { /** * Get the VTL template for an email message. The message is suitable * for inserting values using Apache Velocity. + *

+ * Note that everything is stored here, so that only send() throws a + * MessagingException. * * @param emailFile * full name for the email template, for example "/dspace/config/emails/register". @@ -484,15 +535,6 @@ public static Email getEmail(String emailFile) } return email; } - /* - * Implementation note: It might be necessary to add a quick utility method - * like "send(to, subject, message)". We'll see how far we get without it - - * having all emails as templates in the config allows customisation and - * internationalisation. - * - * Note that everything is stored and the run in send() so that only send() - * throws a MessagingException. - */ /** * Test method to send an email to check email server settings @@ -547,7 +589,7 @@ public static void main(String[] args) { } /** - * Utility struct class for handling file attachments. + * Utility record class for handling file attachments. * * @author ojd20 */ @@ -563,7 +605,7 @@ public FileAttachment(File f, String n) { } /** - * Utility struct class for handling file attachments. + * Utility record class for handling file attachments. * * @author Adán Román Ruiz at arvo.es */ @@ -580,6 +622,8 @@ public InputStreamAttachment(InputStream is, String name, String mimetype) { } /** + * Wrap an {@link InputStream} in a {@link DataSource}. + * * @author arnaldo */ public static class InputStreamDataSource implements DataSource { @@ -587,6 +631,14 @@ public static class InputStreamDataSource implements DataSource { private final String contentType; private final ByteArrayOutputStream baos; + /** + * Consume the content of an InputStream and store it in a local buffer. + * + * @param name give the DataSource a name. + * @param contentType the DataSource contains this type of data. + * @param inputStream content to be buffered in the DataSource. + * @throws IOException if the stream cannot be read. + */ InputStreamDataSource(String name, String contentType, InputStream inputStream) throws IOException { this.name = name; this.contentType = contentType; From d9f4ae7a890718ef7dbdfbaac29642f30a1e61b1 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Tue, 27 Jun 2023 17:05:21 -0400 Subject: [PATCH 323/686] Don't clear the list of accepted embedded message header properties. Why was this cleared? --- dspace-api/src/main/java/org/dspace/core/Email.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/Email.java b/dspace-api/src/main/java/org/dspace/core/Email.java index a95407876e11..a64a85a07351 100644 --- a/dspace-api/src/main/java/org/dspace/core/Email.java +++ b/dspace-api/src/main/java/org/dspace/core/Email.java @@ -378,13 +378,12 @@ public void send() throws MessagingException, IOException { // No template and no content -- PANIC!!! throw new MessagingException("Email has no body"); } - // No template, so use a String of content. + // No existing template, so use a String of content. StringResourceRepository repo = (StringResourceRepository) templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME); repo.putStringResource(contentName, content); // Turn content into a template. template = templateEngine.getTemplate(contentName); - templateHeaders = new String[] {}; } StringWriter writer = new StringWriter(); From 03f24a33397f410c6059d595a66c1a05bdd8da35 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Tue, 27 Jun 2023 17:23:26 -0400 Subject: [PATCH 324/686] Simplify and modernize the code. Store content directly in the template. 
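A hedged sketch of the other body-loading path described in the javadoc above: setContent() takes the body as a String, which is registered with Velocity and compiled like any other template. The resource name, body text and subject are invented, and exception handling is omitted.

    // Build a message without a template file: the String body itself becomes the template.
    Email email = new Email();
    email.setContent("welcome-body",
            "Hello ${params[0]},\n\nyour account on ${config.get('dspace.name')} is ready.");
    email.setSubject("Account ready");   // no template-defined header here, so set it explicitly
    email.addRecipient("foo@bar.com");
    email.addArgument("John");           // ${params[0]}
    email.send();                        // may throw MessagingException or IOException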
--- .../src/main/java/org/dspace/core/Email.java | 58 ++++++++----------- 1 file changed, 25 insertions(+), 33 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/Email.java b/dspace-api/src/main/java/org/dspace/core/Email.java index a64a85a07351..f6df740a53ef 100644 --- a/dspace-api/src/main/java/org/dspace/core/Email.java +++ b/dspace-api/src/main/java/org/dspace/core/Email.java @@ -21,7 +21,6 @@ import java.util.Collections; import java.util.Date; import java.util.Enumeration; -import java.util.Iterator; import java.util.List; import java.util.Properties; import javax.activation.DataHandler; @@ -41,7 +40,6 @@ import javax.mail.internet.MimeMultipart; import javax.mail.internet.ParseException; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.velocity.Template; @@ -140,7 +138,6 @@ public class Email { /** * The content of the message */ - private String content; private String contentName; /** @@ -201,7 +198,6 @@ public Email() { moreAttachments = new ArrayList<>(10); subject = ""; template = null; - content = ""; replyTo = null; charset = null; } @@ -221,12 +217,20 @@ public void addRecipient(String email) { * "Subject:" line must be stripped. * * @param name a name for this message body - * @param cnt the content of the message + * @param content the content of the message */ - public void setContent(String name, String cnt) { - content = cnt; + public void setContent(String name, String content) { contentName = name; arguments.clear(); + + VelocityEngine templateEngine = new VelocityEngine(); + templateEngine.init(VELOCITY_PROPERTIES); + + StringResourceRepository repo = (StringResourceRepository) + templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME); + repo.putStringResource(contentName, content); + // Turn content into a template. + template = templateEngine.getTemplate(contentName); } /** @@ -328,15 +332,20 @@ public void reset() { * {@code mail.message.headers} then that name and its value will be added * to the message's headers. * - *

"subject" is treated specially: if {@link setSubject()} has not been called, - * the value of any "subject" property will be used as if setSubject had - * been called with that value. Thus a template may define its subject, but - * the caller may override it. + *

"subject" is treated specially: if {@link setSubject()} has not been + * called, the value of any "subject" property will be used as if setSubject + * had been called with that value. Thus a template may define its subject, + * but the caller may override it. * * @throws MessagingException if there was a problem sending the mail. * @throws IOException if IO error */ public void send() throws MessagingException, IOException { + if (null == template) { + // No template -- no content -- PANIC!!! + throw new MessagingException("Email has no body"); + } + ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -356,36 +365,18 @@ public void send() throws MessagingException, IOException { MimeMessage message = new MimeMessage(session); // Set the recipients of the message - Iterator i = recipients.iterator(); - - while (i.hasNext()) { - message.addRecipient(Message.RecipientType.TO, new InternetAddress( - i.next())); + for (String recipient : recipients) { + message.addRecipient(Message.RecipientType.TO, + new InternetAddress(recipient)); } // Get headers defined by the template. String[] templateHeaders = config.getArrayProperty("mail.message.headers"); // Format the mail message body - VelocityEngine templateEngine = new VelocityEngine(); - templateEngine.init(VELOCITY_PROPERTIES); - VelocityContext vctx = new VelocityContext(); vctx.put("config", new UnmodifiableConfigurationService(config)); vctx.put("params", Collections.unmodifiableList(arguments)); - if (null == template) { - if (StringUtils.isBlank(content)) { - // No template and no content -- PANIC!!! - throw new MessagingException("Email has no body"); - } - // No existing template, so use a String of content. - StringResourceRepository repo = (StringResourceRepository) - templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME); - repo.putStringResource(contentName, content); - // Turn content into a template. 
- template = templateEngine.getTemplate(contentName); - } - StringWriter writer = new StringWriter(); try { template.merge(vctx, writer); @@ -452,7 +443,8 @@ public void send() throws MessagingException, IOException { // add the stream messageBodyPart = new MimeBodyPart(); messageBodyPart.setDataHandler(new DataHandler( - new InputStreamDataSource(attachment.name,attachment.mimetype,attachment.is))); + new InputStreamDataSource(attachment.name, + attachment.mimetype, attachment.is))); messageBodyPart.setFileName(attachment.name); multipart.addBodyPart(messageBodyPart); } From 69f4829edb726b1d4f2c5b4de3da689cc0de5c76 Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Mon, 10 Jul 2023 13:43:27 +0200 Subject: [PATCH 325/686] Add flag Pattern.UNICODE_CHARACTER_CLASS to pattern compilation to recognize unicode characters --- .../org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index 005f9b42472e..e1136a3f5937 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -845,7 +845,7 @@ private void indexIfFilterTypeFacet(SolrInputDocument doc, DiscoverySearchFilter private void saveFacetPrefixParts(SolrInputDocument doc, DiscoverySearchFilter searchFilter, String value, String separator, String authority, String preferedLabel) { value = StringUtils.normalizeSpace(value); - Pattern pattern = Pattern.compile("\\b\\w+\\b", Pattern.CASE_INSENSITIVE); + Pattern pattern = Pattern.compile("\\b\\w+\\b", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CHARACTER_CLASS); Matcher matcher = pattern.matcher(value); while (matcher.find()) { int index = matcher.start(); From 2ebe1d69e4fdf0df0438f6b13b3d97692da8fbe2 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Thu, 6 Jul 2023 15:44:02 -0500 Subject: [PATCH 326/686] Enable Pull Request Opened action to assign PRs to their creator --- .../pull_request_opened.yml | 26 ------------------- .github/workflows/pull_request_opened.yml | 24 +++++++++++++++++ 2 files changed, 24 insertions(+), 26 deletions(-) delete mode 100644 .github/disabled-workflows/pull_request_opened.yml create mode 100644 .github/workflows/pull_request_opened.yml diff --git a/.github/disabled-workflows/pull_request_opened.yml b/.github/disabled-workflows/pull_request_opened.yml deleted file mode 100644 index 0dc718c0b9a3..000000000000 --- a/.github/disabled-workflows/pull_request_opened.yml +++ /dev/null @@ -1,26 +0,0 @@ -# This workflow runs whenever a new pull request is created -# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs). -# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818 -name: Pull Request opened - -# Only run for newly opened PRs against the "main" branch -on: - pull_request: - types: [opened] - branches: - - main - -jobs: - automation: - runs-on: ubuntu-latest - steps: - # Assign the PR to whomever created it. 
This is useful for visualizing assignments on project boards - # See https://github.com/marketplace/actions/pull-request-assigner - - name: Assign PR to creator - uses: thomaseizinger/assign-pr-creator-action@v1.0.0 - # Note, this authentication token is created automatically - # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - # Ignore errors. It is possible the PR was created by someone who cannot be assigned - continue-on-error: true diff --git a/.github/workflows/pull_request_opened.yml b/.github/workflows/pull_request_opened.yml new file mode 100644 index 000000000000..9b61af72d187 --- /dev/null +++ b/.github/workflows/pull_request_opened.yml @@ -0,0 +1,24 @@ +# This workflow runs whenever a new pull request is created +name: Pull Request opened + +# Only run for newly opened PRs against the "main" or maintenance branches +# We allow this to run for `pull_request_target` so that github secrets are available +# (This is required to assign a PR back to the creator when the PR comes from a forked repo) +on: + pull_request_target: + types: [ opened ] + branches: + - main + - 'dspace-**' + +permissions: + pull-requests: write + +jobs: + automation: + runs-on: ubuntu-latest + steps: + # Assign the PR to whomever created it. This is useful for visualizing assignments on project boards + # See https://github.com/toshimaru/auto-author-assign + - name: Assign PR to creator + uses: toshimaru/auto-author-assign@v1.6.2 From 4471a2dc2308bfc84efaf2b7b604ed7cada56a64 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Thu, 6 Jul 2023 15:44:36 -0500 Subject: [PATCH 327/686] Ensure codescan and label_merge_conflicts run on maintenance branches --- .github/workflows/codescan.yml | 10 +++++++--- .github/workflows/label_merge_conflicts.yml | 7 ++++--- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.github/workflows/codescan.yml b/.github/workflows/codescan.yml index 7580b4ba3dc3..9e6dcc0b23af 100644 --- a/.github/workflows/codescan.yml +++ b/.github/workflows/codescan.yml @@ -5,12 +5,16 @@ # because CodeQL requires a fresh build with all tests *disabled*. name: "Code Scanning" -# Run this code scan for all pushes / PRs to main branch. Also run once a week. +# Run this code scan for all pushes / PRs to main or maintenance branches. Also run once a week. on: push: - branches: [ main ] + branches: + - main + - 'dspace-**' pull_request: - branches: [ main ] + branches: + - main + - 'dspace-**' # Don't run if PR is only updating static documentation paths-ignore: - '**/*.md' diff --git a/.github/workflows/label_merge_conflicts.yml b/.github/workflows/label_merge_conflicts.yml index cc0c7099f40e..0c3b1a0f2a80 100644 --- a/.github/workflows/label_merge_conflicts.yml +++ b/.github/workflows/label_merge_conflicts.yml @@ -1,11 +1,12 @@ # This workflow checks open PRs for merge conflicts and labels them when conflicts are found name: Check for merge conflicts -# Run whenever the "main" branch is updated -# NOTE: This means merge conflicts are only checked for when a PR is merged to main. +# Run this for all pushes (i.e. merges) to 'main' or maintenance branches on: push: - branches: [ main ] + branches: + - main + - 'dspace-**' # So that the `conflict_label_name` is removed if conflicts are resolved, # we allow this to run for `pull_request_target` so that github secrets are available. 
pull_request_target: From 211529f97fd745bd6b25186a976d7e6d296f492b Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Thu, 6 Jul 2023 16:32:16 -0500 Subject: [PATCH 328/686] Split docker image builds into separate jobs to allow them to run in parallel. --- .github/workflows/docker.yml | 267 ++++++++++++++++++++++++++++------- 1 file changed, 219 insertions(+), 48 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 971954a5e1ee..9ec6b8573500 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -15,30 +15,36 @@ on: permissions: contents: read # to fetch code (actions/checkout) +# Define shared environment variables for all jobs below +env: + # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action) + # For a new commit on default branch (main), use the literal tag 'dspace-7_x' on Docker image. + # For a new commit on other branches, use the branch name as the tag for Docker image. + # For a new tag, copy that tag name as the tag for Docker image. + IMAGE_TAGS: | + type=raw,value=dspace-7_x,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }} + type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }} + type=ref,event=tag + # Define default tag "flavor" for docker/metadata-action per + # https://github.com/docker/metadata-action#flavor-input + # We turn off 'latest' tag by default. + TAGS_FLAVOR: | + latest=false + # Architectures / Platforms for which we will build Docker images + # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work. + # If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH + # longer (around 45mins or so) which is why we only run it when pushing a new Docker image. + PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }} + jobs: - docker: + #################################################### + # Build/Push the 'dspace/dspace-dependencies' image. + # This image is used by all other jobs. + #################################################### + dspace-dependencies: # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' if: github.repository == 'dspace/dspace' runs-on: ubuntu-latest - env: - # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action) - # For a new commit on default branch (main), use the literal tag 'dspace-7_x' on Docker image. - # For a new commit on other branches, use the branch name as the tag for Docker image. - # For a new tag, copy that tag name as the tag for Docker image. - IMAGE_TAGS: | - type=raw,value=dspace-7_x,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }} - type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }} - type=ref,event=tag - # Define default tag "flavor" for docker/metadata-action per - # https://github.com/docker/metadata-action#flavor-input - # We turn off 'latest' tag by default. - TAGS_FLAVOR: | - latest=false - # Architectures / Platforms for which we will build Docker images - # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work. - # If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH - # longer (around 45mins or so) which is why we only run it when pushing a new Docker image. 
- PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }} steps: # https://github.com/actions/checkout @@ -62,9 +68,6 @@ jobs: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_ACCESS_TOKEN }} - #################################################### - # Build/Push the 'dspace/dspace-dependencies' image - #################################################### # https://github.com/docker/metadata-action # Get Metadata for docker_build_deps step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image @@ -78,7 +81,7 @@ jobs: # https://github.com/docker/build-push-action - name: Build and push 'dspace-dependencies' image id: docker_build_deps - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: context: . file: ./Dockerfile.dependencies @@ -90,9 +93,38 @@ jobs: tags: ${{ steps.meta_build_deps.outputs.tags }} labels: ${{ steps.meta_build_deps.outputs.labels }} - ####################################### - # Build/Push the 'dspace/dspace' image - ####################################### + ####################################### + # Build/Push the 'dspace/dspace' image + ####################################### + dspace: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + # Must run after 'dspace-dependencies' job above + needs: dspace-dependencies + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Get Metadata for docker_build step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image id: meta_build @@ -104,7 +136,7 @@ jobs: - name: Build and push 'dspace' image id: docker_build - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: context: . file: ./Dockerfile @@ -116,9 +148,38 @@ jobs: tags: ${{ steps.meta_build.outputs.tags }} labels: ${{ steps.meta_build.outputs.labels }} - ##################################################### - # Build/Push the 'dspace/dspace' image ('-test' tag) - ##################################################### + ############################################################# + # Build/Push the 'dspace/dspace' image ('-test' tag) + ############################################################# + dspace-test: + # Ensure this job never runs on forked repos. 
It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + # Must run after 'dspace-dependencies' job above + needs: dspace-dependencies + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Get Metadata for docker_build_test step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image id: meta_build_test @@ -133,7 +194,7 @@ jobs: - name: Build and push 'dspace-test' image id: docker_build_test - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: context: . file: ./Dockerfile.test @@ -145,9 +206,38 @@ jobs: tags: ${{ steps.meta_build_test.outputs.tags }} labels: ${{ steps.meta_build_test.outputs.labels }} - ########################################### - # Build/Push the 'dspace/dspace-cli' image - ########################################### + ########################################### + # Build/Push the 'dspace/dspace-cli' image + ########################################### + dspace-cli: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + # Must run after 'dspace-dependencies' job above + needs: dspace-dependencies + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Get Metadata for docker_build_test step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image id: meta_build_cli @@ -159,7 +249,7 @@ jobs: - name: Build and push 'dspace-cli' image id: docker_build_cli - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: context: . file: ./Dockerfile.cli @@ -171,9 +261,36 @@ jobs: tags: ${{ steps.meta_build_cli.outputs.tags }} labels: ${{ steps.meta_build_cli.outputs.labels }} - ########################################### - # Build/Push the 'dspace/dspace-solr' image - ########################################### + ########################################### + # Build/Push the 'dspace/dspace-solr' image + ########################################### + dspace-solr: + # Ensure this job never runs on forked repos. 
It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Get Metadata for docker_build_solr step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image id: meta_build_solr @@ -185,7 +302,7 @@ jobs: - name: Build and push 'dspace-solr' image id: docker_build_solr - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: context: . file: ./dspace/src/main/docker/dspace-solr/Dockerfile @@ -197,9 +314,36 @@ jobs: tags: ${{ steps.meta_build_solr.outputs.tags }} labels: ${{ steps.meta_build_solr.outputs.labels }} - ########################################################### - # Build/Push the 'dspace/dspace-postgres-pgcrypto' image - ########################################################### + ########################################################### + # Build/Push the 'dspace/dspace-postgres-pgcrypto' image + ########################################################### + dspace-postgres-pgcrypto: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Get Metadata for docker_build_postgres step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image id: meta_build_postgres @@ -211,7 +355,7 @@ jobs: - name: Build and push 'dspace-postgres-pgcrypto' image id: docker_build_postgres - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: # Must build out of subdirectory to have access to install script for pgcrypto context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ @@ -224,9 +368,36 @@ jobs: tags: ${{ steps.meta_build_postgres.outputs.tags }} labels: ${{ steps.meta_build_postgres.outputs.labels }} - ########################################################### - # Build/Push the 'dspace/dspace-postgres-pgcrypto' image ('-loadsql' tag) - ########################################################### + ######################################################################## + # Build/Push the 'dspace/dspace-postgres-pgcrypto' image 
(-loadsql tag) + ######################################################################## + dspace-postgres-pgcrypto-loadsql: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + runs-on: ubuntu-latest + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v3 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Get Metadata for docker_build_postgres_loadsql step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image id: meta_build_postgres_loadsql @@ -241,7 +412,7 @@ jobs: - name: Build and push 'dspace-postgres-pgcrypto-loadsql' image id: docker_build_postgres_loadsql - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: # Must build out of subdirectory to have access to install script for pgcrypto context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/ From dd8bfffedba2f922778d079b62e44cdd618e71b4 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 7 Jul 2023 11:47:09 -0500 Subject: [PATCH 329/686] Ensure 'main' code is tagged as 'latest' in DockerHub --- .github/workflows/docker.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 9ec6b8573500..f1ae184fd5c0 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -18,16 +18,16 @@ permissions: # Define shared environment variables for all jobs below env: # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action) - # For a new commit on default branch (main), use the literal tag 'dspace-7_x' on Docker image. + # For a new commit on default branch (main), use the literal tag 'latest' on Docker image. # For a new commit on other branches, use the branch name as the tag for Docker image. # For a new tag, copy that tag name as the tag for Docker image. IMAGE_TAGS: | - type=raw,value=dspace-7_x,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }} + type=raw,value=latest,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }} type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }} type=ref,event=tag # Define default tag "flavor" for docker/metadata-action per # https://github.com/docker/metadata-action#flavor-input - # We turn off 'latest' tag by default. 
+ # We manage the 'latest' tag ourselves to the 'main' branch (see settings above) TAGS_FLAVOR: | latest=false # Architectures / Platforms for which we will build Docker images From 4c329b43193a3bea151bdf9af27b663affcf7246 Mon Sep 17 00:00:00 2001 From: Sean Kalynuk Date: Wed, 26 Jul 2023 11:27:32 -0500 Subject: [PATCH 330/686] Fixes #8558 - set Solr UTC timezone Set the timezone of the Solr date formatter to UTC --- dspace-api/src/main/java/org/dspace/util/SolrUtils.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/util/SolrUtils.java b/dspace-api/src/main/java/org/dspace/util/SolrUtils.java index f62feba29886..7b11d73834bb 100644 --- a/dspace-api/src/main/java/org/dspace/util/SolrUtils.java +++ b/dspace-api/src/main/java/org/dspace/util/SolrUtils.java @@ -35,6 +35,8 @@ private SolrUtils() { } * @return date formatter compatible with Solr. */ public static DateFormat getDateFormatter() { - return new SimpleDateFormat(SolrUtils.SOLR_DATE_FORMAT); + DateFormat formatter = new SimpleDateFormat(SolrUtils.SOLR_DATE_FORMAT); + formatter.setTimeZone(SOLR_TIME_ZONE); + return formatter; } } From 6fe61b4a8e017d96e5ae2deaf49c59b33b1017e0 Mon Sep 17 00:00:00 2001 From: Sascha Szott Date: Tue, 11 Jul 2023 16:52:20 +0200 Subject: [PATCH 331/686] remove obsolete code fragments --- .../dspace/discovery/indexobject/ItemIndexFactoryImpl.java | 7 ------- 1 file changed, 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index e1136a3f5937..7cdb8b93d80e 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -172,13 +172,6 @@ public SolrInputDocument buildDocument(Context context, IndexableItem indexableI addNamedResourceTypeIndex(doc, acvalue); } - // write the index and close the inputstreamreaders - try { - log.info("Wrote Item: " + item.getID() + " to Index"); - } catch (RuntimeException e) { - log.error("Error while writing item to discovery index: " + item.getID() + " message:" - + e.getMessage(), e); - } return doc; } From 657312c8f044c9370988f207afde137437aeae16 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Thu, 27 Jul 2023 16:55:42 -0500 Subject: [PATCH 332/686] Add action to automatically create a port PR when specified (cherry picked from commit f6a898c3d13360286c416b2588ab0447d9e3d81b) --- .../workflows/port_merged_pull_request.yml | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 .github/workflows/port_merged_pull_request.yml diff --git a/.github/workflows/port_merged_pull_request.yml b/.github/workflows/port_merged_pull_request.yml new file mode 100644 index 000000000000..418498fa4499 --- /dev/null +++ b/.github/workflows/port_merged_pull_request.yml @@ -0,0 +1,38 @@ +# This workflow will attempt to port a merged pull request to +# the branch specified in a "port to" label (if exists) +name: Port merged Pull Request + +# Only run for merged PRs against the "main" or maintenance branches +# We allow this to run for `pull_request_target` so that github secrets are available +# (This is required when the PR comes from a forked repo) +on: + pull_request_target: + types: [ closed ] + branches: + - main + - 'dspace-**' + +permissions: + contents: write # so action can add comments + pull-requests: write # so action can create pull 
requests + +jobs: + port_pr: + runs-on: ubuntu-latest + # Don't run on closed *unmerged* pull requests + if: github.event.pull_request.merged + steps: + # Checkout code + - uses: actions/checkout@v3 + # Port PR to other branch (ONLY if labeled with "port to") + # See https://github.com/korthout/backport-action + - name: Create backport pull requests + uses: korthout/backport-action@v1 + with: + # Trigger based on a "port to [branch]" label on PR + # (This label must specify the branch name to port to) + label_pattern: '^port to ([^ ]+)$' + # Title to add to the (newly created) port PR + pull_title: '[Port ${target_branch}] ${pull_title}' + # Description to add to the (newly created) port PR + pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.' \ No newline at end of file From 0162853bb5a3eb4613a3eae1200482b1fa0eb3ac Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 28 Jul 2023 11:15:08 -0500 Subject: [PATCH 333/686] Run PR Port action as 'dspace-bot' to allow new PRs to trigger CI checks --- .github/workflows/port_merged_pull_request.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/port_merged_pull_request.yml b/.github/workflows/port_merged_pull_request.yml index 418498fa4499..6c491e41c21b 100644 --- a/.github/workflows/port_merged_pull_request.yml +++ b/.github/workflows/port_merged_pull_request.yml @@ -35,4 +35,10 @@ jobs: # Title to add to the (newly created) port PR pull_title: '[Port ${target_branch}] ${pull_title}' # Description to add to the (newly created) port PR - pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.' \ No newline at end of file + pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.' + # Copy all labels from original PR to (newly created) port PR + # NOTE: The labels matching 'label_pattern' are automatically excluded + copy_labels_pattern: '*' + # Use a personal access token (PAT) to create PR as 'dspace-bot' user. + # A PAT is required in order for the new PR to trigger its own actions (for CI checks) + github_token: ${{ secrets.PR_PORT_TOKEN }} \ No newline at end of file From d1539efe9af4418ff2b11fcf6ce3f5e9b0d15f58 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 28 Jul 2023 11:15:42 -0500 Subject: [PATCH 334/686] Minor update to label_merge_conflicts to ignore any errors (seem random at this time) --- .github/workflows/label_merge_conflicts.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/label_merge_conflicts.yml b/.github/workflows/label_merge_conflicts.yml index 0c3b1a0f2a80..a023f4eef246 100644 --- a/.github/workflows/label_merge_conflicts.yml +++ b/.github/workflows/label_merge_conflicts.yml @@ -25,6 +25,8 @@ jobs: # See: https://github.com/prince-chrismc/label-merge-conflicts-action - name: Auto-label PRs with merge conflicts uses: prince-chrismc/label-merge-conflicts-action@v3 + # Ignore any failures -- may occur (randomly?) for older, outdated PRs. + continue-on-error: true # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved. # Note, the authentication token is created automatically # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token From 867331e3af8264d2c9f9c8fa81e694ab8ca9736c Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 28 Jul 2023 11:48:30 -0500 Subject: [PATCH 335/686] Fix typo. 
Config must be a valid regex (cherry picked from commit 799528963e3c0391852ecbaf82ef21ec7d477342) --- .github/workflows/port_merged_pull_request.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/port_merged_pull_request.yml b/.github/workflows/port_merged_pull_request.yml index 6c491e41c21b..50faf3f88679 100644 --- a/.github/workflows/port_merged_pull_request.yml +++ b/.github/workflows/port_merged_pull_request.yml @@ -38,7 +38,7 @@ jobs: pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.' # Copy all labels from original PR to (newly created) port PR # NOTE: The labels matching 'label_pattern' are automatically excluded - copy_labels_pattern: '*' + copy_labels_pattern: '.*' # Use a personal access token (PAT) to create PR as 'dspace-bot' user. # A PAT is required in order for the new PR to trigger its own actions (for CI checks) github_token: ${{ secrets.PR_PORT_TOKEN }} \ No newline at end of file From 0ad1998ff53f62f77ec04e9ca1fbe80b915ac498 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 28 Jul 2023 17:38:04 +0000 Subject: [PATCH 336/686] Bump h2 from 2.1.214 to 2.2.220 Bumps [h2](https://github.com/h2database/h2database) from 2.1.214 to 2.2.220. - [Release notes](https://github.com/h2database/h2database/releases) - [Commits](https://github.com/h2database/h2database/compare/version-2.1.214...version-2.2.220) --- updated-dependencies: - dependency-name: com.h2database:h2 dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 3873fa372c66..d2b3b3428856 100644 --- a/pom.xml +++ b/pom.xml @@ -1694,7 +1694,7 @@ com.h2database h2 - 2.1.210 + 2.2.220 test From 06b54d5afa52af26bc2fc47f6a154666879f2322 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Fri, 14 Jul 2023 11:06:35 +0100 Subject: [PATCH 337/686] Update FullTextContentStreams.java Fix NPE if bitstream is null --- .../dspace/discovery/FullTextContentStreams.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java b/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java index ee220e5a4fdf..6d0c57c62855 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java +++ b/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java @@ -77,13 +77,19 @@ private void buildFullTextList(Item parentItem) { // a-ha! grab the text out of the bitstreams List bitstreams = myBundle.getBitstreams(); + log.debug("Processing full-text bitstreams. 
Item handle: " + sourceInfo); + for (Bitstream fulltextBitstream : emptyIfNull(bitstreams)) { fullTextStreams.add(new FullTextBitstream(sourceInfo, fulltextBitstream)); - log.debug("Added BitStream: " - + fulltextBitstream.getStoreNumber() + " " - + fulltextBitstream.getSequenceID() + " " - + fulltextBitstream.getName()); + if (fulltextBitstream != null) { + log.debug("Added BitStream: " + + fulltextBitstream.getStoreNumber() + " " + + fulltextBitstream.getSequenceID() + " " + + fulltextBitstream.getName()); + } else { + log.error("Found a NULL bitstream when processing full-text files: item handle:" + sourceInfo); + } } } } From 7b54dde3a610a2537e0048d74629a75ad96bb5db Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Fri, 14 Jul 2023 16:17:22 +0100 Subject: [PATCH 338/686] Update FullTextContentStreams.java Add additional NPE checks --- .../java/org/dspace/discovery/FullTextContentStreams.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java b/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java index 6d0c57c62855..21468def6866 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java +++ b/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java @@ -76,7 +76,6 @@ private void buildFullTextList(Item parentItem) { if (StringUtils.equals(FULLTEXT_BUNDLE, myBundle.getName())) { // a-ha! grab the text out of the bitstreams List bitstreams = myBundle.getBitstreams(); - log.debug("Processing full-text bitstreams. Item handle: " + sourceInfo); for (Bitstream fulltextBitstream : emptyIfNull(bitstreams)) { @@ -164,16 +163,16 @@ public FullTextBitstream(final String parentHandle, final Bitstream file) { } public String getContentType(final Context context) throws SQLException { - BitstreamFormat format = bitstream.getFormat(context); + BitstreamFormat format = bitstream != null ? bitstream.getFormat(context) : null; return format == null ? null : StringUtils.trimToEmpty(format.getMIMEType()); } public String getFileName() { - return StringUtils.trimToEmpty(bitstream.getName()); + return bitstream != null ? StringUtils.trimToEmpty(bitstream.getName()) : null; } public long getSize() { - return bitstream.getSizeBytes(); + return bitstream != null ? 
bitstream.getSizeBytes() : -1; } public InputStream getInputStream() throws SQLException, IOException, AuthorizeException { From e4c639c905516548ecf6e252b3a6ae4152fda509 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Sun, 16 Jul 2023 20:42:03 +0100 Subject: [PATCH 339/686] Update ItemUtils.java Prevent npe if bitstream is null --- dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 955c3a78c392..35bef8c8d77f 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -103,6 +103,11 @@ private static Element createBundlesElement(Context context, Item item) throws S bundle.getElement().add(bitstreams); List bits = b.getBitstreams(); for (Bitstream bit : bits) { + // Check if bitstream is null and log the error + if (bit == null) { + log.error("Null bitstream found, check item uuid: " + item.getID()); + break; + } Element bitstream = create("bitstream"); bitstreams.getElement().add(bitstream); String url = ""; From 41c992a231af2704a44964b897bc2ca0308cb160 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 26 Jul 2023 21:26:04 +0300 Subject: [PATCH 340/686] dspace.cfg: remove old webui.itemlist properties These properties are no longer used in DSpace 7: webui.itemlist.widths webui.itemlist.*.widths webui.itemlist.tablewidth (cherry picked from commit 16c46c49797cf4a15b8ef51efdd365610bdf73ab) --- dspace/config/dspace.cfg | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index cafd37931fd4..c37cfea0b196 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1472,11 +1472,6 @@ log.report.dir = ${dspace.dir}/log # # webui.itemlist.columns = thumbnail, dc.date.issued(date), dc.title, dc.contributor.* # -# You can customise the width of each column with the following line - you can have numbers (pixels) -# or percentages. For the 'thumbnail' column, a setting of '*' will use the max width specified -# for browse thumbnails (webui.browse.thumbnail.maxwidth, thumbnail.maxwidth) -# webui.itemlist.widths = *, 130, 60%, 40% - # Additionally, you can override the DC fields used on the listing page for # a given browse index and/or sort option. As a sort option or index may be defined # on a field that isn't normally included in the list, this allows you to display @@ -1502,14 +1497,6 @@ log.report.dir = ${dspace.dir}/log # and thumbnails in the item list # # webui.itemlist.dateaccessioned.columns = thumbnail, dc.date.accessioned(date), dc.title, dc.contributor.* -# -# As above, you can customise the width of the columns for each configured column list, substituting '.widths' for -# '.columns' in the property name. See the setting for webui.itemlist.widths for more details -# webui.itemlist.dateaccessioned.widths = *, 130, 60%, 40% - -# You can also set the overall size of the item list table with the following setting. It can lead to faster -# table rendering when used with the column widths above, but not generally recommended. 
-# webui.itemlist.tablewidth = 100% ##### SFX Server (OpenURL) ##### From 6e4490197b50387a3b32a5e2aa7f55cd10b8be63 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 26 Jul 2023 21:42:16 +0300 Subject: [PATCH 341/686] dspace.cfg: remove old webui.browse.thumbnail.show property The webui.browse.thumbnail.show property is no longer used as of DSpace 7. Also remove subsequent references to adding thumbnails to item view columns since these are from legacy DSpace. (cherry picked from commit 213a546486073f09e73d91d014c489ed300bf59d) --- dspace/config/dspace.cfg | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index c37cfea0b196..c7f1cb80a978 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1460,9 +1460,6 @@ log.report.dir = ${dspace.dir}/log # fields at least the date and title fields as specified by the # webui.browse.index.* configuration options below. # -# If you have enabled thumbnails (webui.browse.thumbnail.show), you must also -# include a 'thumbnail' entry in your columns - this is where the thumbnail will be displayed -# # If you want to mark each item include a 'mark_[value]' (without the brackets - replace the word 'value' with anything that # has a meaning for your mark) entry in your columns - this is where the icon will be displayed. # Do not forget to add a Spring bean with id = "org.dspace.app.itemmarking.ItemMarkingExtractor.[value]" @@ -1470,7 +1467,7 @@ log.report.dir = ${dspace.dir}/log # You can add more than one 'mark_[value]' options (with different value) in case you need to mark items more than one time for # different purposes. Remember to add the respective beans in file 'config/spring/api/item-marking.xml'. # -# webui.itemlist.columns = thumbnail, dc.date.issued(date), dc.title, dc.contributor.* +# webui.itemlist.columns = dc.date.issued(date), dc.title, dc.contributor.* # # Additionally, you can override the DC fields used on the listing page for # a given browse index and/or sort option. As a sort option or index may be defined @@ -1489,14 +1486,6 @@ log.report.dir = ${dspace.dir}/log # In the last case, a sort option name will always take precedence over a browse # index name. Note also, that for any additional columns you list, you will need to # ensure there is an itemlist. entry in the messages file. -# -# The following example would display the date of accession in place of the issue date -# whenever the dateaccessioned browse index or sort option is selected. -# -# Just like webui.itemlist.columns, you will need to include a 'thumbnail' entry to display -# and thumbnails in the item list -# -# webui.itemlist.dateaccessioned.columns = thumbnail, dc.date.accessioned(date), dc.title, dc.contributor.* ##### SFX Server (OpenURL) ##### From acf27a5d76de5981fbdf9aae2bb174d0ca2c4904 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 26 Jul 2023 21:45:42 +0300 Subject: [PATCH 342/686] dspace.cfg: remove old webui.itemlist.browse.* property The webui.itemlist.browse.* properties are no longer used in DSpace 7. 
(cherry picked from commit 35f72bc9d0c1a01aa0b9313216bbbe63e6960a81) --- dspace/config/dspace.cfg | 6 ------ 1 file changed, 6 deletions(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index c7f1cb80a978..1feaf05cda30 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1478,14 +1478,8 @@ log.report.dir = ${dspace.dir}/log # they are listed below is the priority in which they will be used (so a combination # of an index name and sort name will take precedence over just the browse name). # -# webui.itemlist.browse..sort..columns # webui.itemlist.sort..columns -# webui.itemlist.browse..columns # webui.itemlist..columns -# -# In the last case, a sort option name will always take precedence over a browse -# index name. Note also, that for any additional columns you list, you will need to -# ensure there is an itemlist. entry in the messages file. ##### SFX Server (OpenURL) ##### From e07763b021c009e421b97dc5b6404c284c9de168 Mon Sep 17 00:00:00 2001 From: Martin Walk Date: Thu, 3 Aug 2023 13:39:43 +0200 Subject: [PATCH 343/686] Fix #8963: Remove deletion constraint from Groomer --- .../main/java/org/dspace/eperson/Groomer.java | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/Groomer.java b/dspace-api/src/main/java/org/dspace/eperson/Groomer.java index 2a828cdc12b4..5485bb1d0ca9 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/Groomer.java +++ b/dspace-api/src/main/java/org/dspace/eperson/Groomer.java @@ -141,20 +141,10 @@ private static void aging(CommandLine command) throws SQLException { System.out.println(); if (delete) { - List whyNot = ePersonService.getDeleteConstraints(myContext, account); - if (!whyNot.isEmpty()) { - System.out.print("\tCannot be deleted; referenced in"); - for (String table : whyNot) { - System.out.print(' '); - System.out.print(table); - } - System.out.println(); - } else { - try { - ePersonService.delete(myContext, account); - } catch (AuthorizeException | IOException ex) { - System.err.println(ex.getMessage()); - } + try { + ePersonService.delete(myContext, account); + } catch (AuthorizeException | IOException ex) { + System.err.println(ex.getMessage()); } } } From f803a2acf199b0dc6d111cb3f75029a51251f09d Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 2 Aug 2023 15:36:40 -0500 Subject: [PATCH 344/686] Enable entity type to submission form mapping by default (cherry picked from commit b71eee89c1e1dd7569e800e13eb8878548853ce6) --- dspace/config/item-submission.xml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index a6cd49bdf1e8..1060a3303119 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -55,9 +55,7 @@ - - - - - - PLEASE NOTICE THAT YOU WILL HAVE TO RESTART DSPACE - - - - - - - Uncomment if you intend to use them - --> - @@ -65,8 +63,6 @@ - --> - From 81cbb6692c148f4ade0458106376881f5f83e4a5 Mon Sep 17 00:00:00 2001 From: corrado lombardi Date: Wed, 19 Jul 2023 12:58:36 +0200 Subject: [PATCH 345/686] [DURACOM-179] replaced 'null' value with exception actual value in sendErrorResponse method calls having 'null' (cherry picked from commit aa35a47add5565a9302d276da2ceb22b8dbc320f) --- .../exception/DSpaceApiExceptionControllerAdvice.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java index 4ad1e479348f..a65ea13bc2c0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java @@ -132,7 +132,7 @@ protected void handleUnprocessableEntityException(HttpServletRequest request, Ht Exception ex) throws IOException { //422 is not defined in HttpServletResponse. Its meaning is "Unprocessable Entity". //Using the value from HttpStatus. - sendErrorResponse(request, response, null, + sendErrorResponse(request, response, ex, "Unprocessable or invalid entity", HttpStatus.UNPROCESSABLE_ENTITY.value()); } @@ -140,7 +140,7 @@ protected void handleUnprocessableEntityException(HttpServletRequest request, Ht @ExceptionHandler( {InvalidSearchRequestException.class}) protected void handleInvalidSearchRequestException(HttpServletRequest request, HttpServletResponse response, Exception ex) throws IOException { - sendErrorResponse(request, response, null, + sendErrorResponse(request, response, ex, "Invalid search request", HttpStatus.UNPROCESSABLE_ENTITY.value()); } @@ -180,7 +180,7 @@ protected void handleCustomUnprocessableEntityException(HttpServletRequest reque TranslatableException ex) throws IOException { Context context = ContextUtil.obtainContext(request); sendErrorResponse( - request, response, null, ex.getLocalizedMessage(context), HttpStatus.UNPROCESSABLE_ENTITY.value() + request, response, (Exception) ex, ex.getLocalizedMessage(context), HttpStatus.UNPROCESSABLE_ENTITY.value() ); } @@ -188,7 +188,7 @@ protected void handleCustomUnprocessableEntityException(HttpServletRequest reque protected void ParameterConversionException(HttpServletRequest request, HttpServletResponse response, Exception ex) throws IOException { // we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428 - sendErrorResponse(request, response, null, + sendErrorResponse(request, response, ex, "A required parameter is invalid", HttpStatus.BAD_REQUEST.value()); } @@ -197,7 +197,7 @@ protected void ParameterConversionException(HttpServletRequest request, HttpServ protected void MissingParameterException(HttpServletRequest request, HttpServletResponse response, Exception ex) throws IOException { // we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428 - sendErrorResponse(request, response, null, + sendErrorResponse(request, response, ex, "A required parameter is missing", HttpStatus.BAD_REQUEST.value()); } From 2180b4e53550e9d79a999e314ec12444c5f42eb8 Mon Sep 17 00:00:00 2001 From: "David P. Steelman" Date: Mon, 3 Jul 2023 14:09:15 -0400 Subject: [PATCH 346/686] DS-8935. webui.browse.link CrossLinks - Fix for multiple exact matches Fixes #8935 when multiple exact match "webui.browse.link" configuration entries are present that point to different indexes. Modified the code to return the index associated with the given metadata (which is used as the key in the hash map), instead of the key from the keySet (which may not actually be the metadata value being searched for). 
https://github.com/DSpace/DSpace/issues/8935 (cherry picked from commit b846c53baaeae1e19fbbafa3dc7ca724bcaf32c1) --- .../java/org/dspace/browse/CrossLinks.java | 2 +- .../org/dspace/browse/CrossLinksTest.java | 103 ++++++++++++++++++ 2 files changed, 104 insertions(+), 1 deletion(-) create mode 100644 dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java diff --git a/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java b/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java index 1ce2e558866d..ec4cb199ea1d 100644 --- a/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java +++ b/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java @@ -108,7 +108,7 @@ public String findLinkType(String metadata) { } else { // Exact match, if the key field has no .* wildcard if (links.containsKey(metadata)) { - return links.get(key); + return links.get(metadata); } } } diff --git a/dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java b/dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java new file mode 100644 index 000000000000..83aab72d904e --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java @@ -0,0 +1,103 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.browse; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.dspace.AbstractDSpaceTest; +import org.dspace.services.ConfigurationService; +import org.dspace.utils.DSpace; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CrossLinks} + */ +public class CrossLinksTest extends AbstractDSpaceTest { + protected ConfigurationService configurationService; + + + @Before + public void setUp() { + configurationService = new DSpace().getConfigurationService(); + } + + @Test + public void testFindLinkType_Null() throws Exception { + CrossLinks crossLinks = new CrossLinks(); + assertNull(crossLinks.findLinkType(null)); + } + + @Test + public void testFindLinkType_NoMatch() throws Exception { + CrossLinks crossLinks = new CrossLinks(); + String metadataField = "foo.bar.baz.does.not.exist"; + assertNull(crossLinks.findLinkType(metadataField)); + } + + @Test + public void testFindLinkType_WildcardMatch() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*"); + CrossLinks crossLinks = new CrossLinks(); + + String metadataField = "dc.contributor.author"; + assertEquals("author",crossLinks.findLinkType(metadataField)); + } + + @Test + public void testFindLinkType_SingleExactMatch_Author() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.author"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("type",crossLinks.findLinkType("dc.genre")); + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + } + + @Test + public void testFindLinkType_SingleExactMatch_Type() throws Exception { + configurationService.setProperty("webui.browse.link.1", "type:dc.genre"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("type",crossLinks.findLinkType("dc.genre")); + } + + @Test + public void testFindLinkType_MultipleExactMatches_DifferentIndexes() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.author"); + 
configurationService.setProperty("webui.browse.link.2", "type:dc.genre"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + assertEquals("type",crossLinks.findLinkType("dc.genre")); + } + + @Test + public void testFindLinkType_MultipleWildcardMatches_DifferentIndexes() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*"); + configurationService.setProperty("webui.browse.link.2", "subject:dc.subject.*"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + assertEquals("subject",crossLinks.findLinkType("dc.subject.lcsh")); + } + + @Test + public void testFindLinkType_MultiplExactAndWildcardMatches_DifferentIndexes() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*"); + configurationService.setProperty("webui.browse.link.2", "subject:dc.subject.*"); + configurationService.setProperty("webui.browse.link.3", "type:dc.genre"); + configurationService.setProperty("webui.browse.link.4", "dateissued:dc.date.issued"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + assertEquals("subject",crossLinks.findLinkType("dc.subject.lcsh")); + assertEquals("type",crossLinks.findLinkType("dc.genre")); + assertEquals("dateissued",crossLinks.findLinkType("dc.date.issued")); + } +} From 066dfbc8496fbf1c6e8ea76226aca91318807b63 Mon Sep 17 00:00:00 2001 From: Christian Bethge <54576195+ChrisBethgster@users.noreply.github.com> Date: Thu, 3 Aug 2023 13:01:12 +0200 Subject: [PATCH 347/686] #9006 fix referenced configuration file (cherry picked from commit 29a88d7e2dcfc36d2cd7991de3b84ef5f5623b63) --- .../src/main/java/org/dspace/statistics/GeoIpService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java index 7f8a11e5ba13..40fea6cf54da 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java +++ b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java @@ -37,7 +37,7 @@ public class GeoIpService { public DatabaseReader getDatabaseReader() throws IllegalStateException { String dbPath = configurationService.getProperty("usage-statistics.dbfile"); if (StringUtils.isBlank(dbPath)) { - throw new IllegalStateException("The required 'dbfile' configuration is missing in solr-statistics.cfg!"); + throw new IllegalStateException("The required 'dbfile' configuration is missing in usage-statistics.cfg!"); } try { From dd5c810b0ce4956db7a40c9ca4fd0058781705cb Mon Sep 17 00:00:00 2001 From: Christian Bethge <54576195+ChrisBethgster@users.noreply.github.com> Date: Thu, 3 Aug 2023 13:04:03 +0200 Subject: [PATCH 348/686] #9006 fix referenced configuration file (Test) (cherry picked from commit 309b0b355e4bffd6a1be3e6341dd6d17f99892c8) --- .../src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java index 8c1c534de14c..0bb679339877 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java @@ -67,7 +67,7 
@@ public void testWithAdminUser() throws Exception { match("solrSearchCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")), match("solrStatisticsCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")), match("geoIp", UP_WITH_ISSUES_STATUS, - Map.of("reason", "The required 'dbfile' configuration is missing in solr-statistics.cfg!")) + Map.of("reason", "The required 'dbfile' configuration is missing in usage-statistics.cfg!")) ))); } From a3f2bd0bb7cfbfdd491156e1497c5c921c02faa6 Mon Sep 17 00:00:00 2001 From: "max.nuding" Date: Thu, 6 Jul 2023 09:17:59 +0200 Subject: [PATCH 349/686] Fix #8933: Only add the base statistic core if it hasn't already been added --- .../java/org/dspace/statistics/SolrLoggerServiceImpl.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index 7853c3e11abf..9f34a4204721 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -1691,6 +1691,14 @@ protected synchronized void initSolrYearCores() { statisticYearCores .add(baseSolrUrl.replace("http://", "").replace("https://", "") + statCoreName); } + var baseCore = ((HttpSolrClient) solr) + .getBaseURL() + .replace("http://", "") + .replace("https://", ""); + if (!statisticYearCores.contains(baseCore)) { + //Also add the core containing the current year, if it hasn't been added already + statisticYearCores.add(baseCore); + } //Also add the core containing the current year ! statisticYearCores.add(((HttpSolrClient) solr) .getBaseURL() From 59f14cf67a6503737dfcc81e43540342961d6a31 Mon Sep 17 00:00:00 2001 From: Max Nuding Date: Fri, 4 Aug 2023 08:39:03 +0200 Subject: [PATCH 350/686] Remove duplicate code --- .../java/org/dspace/statistics/SolrLoggerServiceImpl.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index 9f34a4204721..19c79af34dcc 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -1699,11 +1699,6 @@ protected synchronized void initSolrYearCores() { //Also add the core containing the current year, if it hasn't been added already statisticYearCores.add(baseCore); } - //Also add the core containing the current year ! - statisticYearCores.add(((HttpSolrClient) solr) - .getBaseURL() - .replace("http://", "") - .replace("https://", "")); } catch (IOException | SolrServerException e) { log.error(e.getMessage(), e); } From 40ced53032e9a3082ba563c4e0775a8e4b6a6af8 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Fri, 28 Jul 2023 10:50:07 -0400 Subject: [PATCH 351/686] On failure log the name of the assetstore file and trace causes of exception. 
(cherry picked from commit 22974e982c99b7faa9d287ddc5bef4715f19849a) --- .../mediafilter/MediaFilterServiceImpl.java | 12 +++--- .../java/org/dspace/util/ThrowableUtils.java | 41 +++++++++++++++++++ 2 files changed, 48 insertions(+), 5 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java index e2c6c9c5db06..1a8c2ddd3ea4 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java @@ -40,6 +40,7 @@ import org.dspace.eperson.service.GroupService; import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.services.ConfigurationService; +import org.dspace.util.ThrowableUtils; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -240,8 +241,9 @@ public boolean filterBitstream(Context context, Item myItem, sb.append("\tFile Size: ").append(size); sb.append("\tChecksum: ").append(checksum); sb.append("\tAsset Store: ").append(assetstore); + sb.append("\tInternal ID: ").append(myBitstream.getInternalId()); logError(sb.toString()); - logError(e.getMessage(), e); + logError(ThrowableUtils.formatCauseChain(e)); } } else if (filterClass instanceof SelfRegisterInputFormats) { // Filter implements self registration, so check to see if it should be applied @@ -319,10 +321,10 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo // check if destination bitstream exists Bundle existingBundle = null; - List existingBitstreams = new ArrayList(); + List existingBitstreams = new ArrayList<>(); List bundles = itemService.getBundles(item, formatFilter.getBundleName()); - if (bundles.size() > 0) { + if (!bundles.isEmpty()) { // only finds the last matching bundle and all matching bitstreams in the proper bundle(s) for (Bundle bundle : bundles) { List bitstreams = bundle.getBitstreams(); @@ -337,7 +339,7 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo } // if exists and overwrite = false, exit - if (!overWrite && (existingBitstreams.size() > 0)) { + if (!overWrite && (!existingBitstreams.isEmpty())) { if (!isQuiet) { logInfo("SKIPPED: bitstream " + source.getID() + " (item: " + item.getHandle() + ") because '" + newName + "' already exists"); @@ -370,7 +372,7 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo } Bundle targetBundle; // bundle we're modifying - if (bundles.size() < 1) { + if (bundles.isEmpty()) { // create new bundle if needed targetBundle = bundleService.create(context, item, formatFilter.getBundleName()); } else { diff --git a/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java b/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java new file mode 100644 index 000000000000..7809e2048a07 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +/** + * Things you wish {@link Throwable} or some logging package would do for you. 
+ * + * @author mwood + */ +public class ThrowableUtils { + /** + * Utility class: do not instantiate. + */ + private ThrowableUtils() { } + + /** + * Trace a chain of {@code Throwable}s showing only causes. + * Less voluminous than a stack trace. Useful if you just want to know + * what caused third-party code to return an uninformative exception + * message. + * + * @param throwable the exception or whatever. + * @return list of messages from each {@code Throwable} in the chain, + * separated by '\n'. + */ + static public String formatCauseChain(Throwable throwable) { + StringBuilder trace = new StringBuilder(); + trace.append(throwable.getMessage()); + Throwable cause = throwable.getCause(); + while (null != cause) { + trace.append("\nCaused by: ").append(cause.getMessage()); + cause = cause.getCause(); + } + return trace.toString(); + } +} From 8db2f1814334fed006bd624536921c4438abc9b5 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Fri, 28 Jul 2023 11:23:20 -0400 Subject: [PATCH 352/686] Report Throwable's type too. (cherry picked from commit d6b612fc5cf84fe6b7226649451b7b927ded8997) --- dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java b/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java index 7809e2048a07..e1502e89b514 100644 --- a/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java +++ b/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java @@ -33,7 +33,9 @@ static public String formatCauseChain(Throwable throwable) { trace.append(throwable.getMessage()); Throwable cause = throwable.getCause(); while (null != cause) { - trace.append("\nCaused by: ").append(cause.getMessage()); + trace.append("\nCaused by: ") + .append(cause.getClass().getCanonicalName()).append(' ') + .append(cause.getMessage()); cause = cause.getCause(); } return trace.toString(); From 65241b7a3e7706128fa0955aa3fe3593b4a693be Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Mon, 31 Jul 2023 09:55:09 -0400 Subject: [PATCH 353/686] More description on OutOfMemoryError too. (cherry picked from commit bbe5df3f7dd4a33423fdf47702e23f3eb9ef821f) --- .../mediafilter/MediaFilterServiceImpl.java | 50 +++++++++++++------ 1 file changed, 34 insertions(+), 16 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java index 1a8c2ddd3ea4..b50fb22355a3 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java @@ -10,6 +10,7 @@ import java.io.InputStream; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -226,23 +227,8 @@ public boolean filterBitstream(Context context, Item myItem, filtered = true; } } catch (Exception e) { - String handle = myItem.getHandle(); - List bundles = myBitstream.getBundles(); - long size = myBitstream.getSizeBytes(); - String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")"; - int assetstore = myBitstream.getStoreNumber(); - // Printout helpful information to find the errored bitstream. 
- StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n"); - sb.append("\tItem Handle: ").append(handle); - for (Bundle bundle : bundles) { - sb.append("\tBundle Name: ").append(bundle.getName()); - } - sb.append("\tFile Size: ").append(size); - sb.append("\tChecksum: ").append(checksum); - sb.append("\tAsset Store: ").append(assetstore); - sb.append("\tInternal ID: ").append(myBitstream.getInternalId()); - logError(sb.toString()); + logError(formatBitstreamDetails(myItem.getHandle(), myBitstream)); logError(ThrowableUtils.formatCauseChain(e)); } } else if (filterClass instanceof SelfRegisterInputFormats) { @@ -401,6 +387,7 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo } catch (OutOfMemoryError oome) { logError("!!! OutOfMemoryError !!!"); + logError(formatBitstreamDetails(item.getHandle(), source)); } // we are overwriting, so remove old bitstream @@ -498,6 +485,37 @@ public boolean inSkipList(String identifier) { } } + /** + * Describe a Bitstream in detail. Format a single line of text with + * information such as Bitstore index, backing file ID, size, checksum, + * enclosing Item and Bundles. + * + * @param itemHandle Handle of the Item by which we found the Bitstream. + * @param bitstream the Bitstream to be described. + * @return Bitstream details. + */ + private String formatBitstreamDetails(String itemHandle, + Bitstream bitstream) { + List bundles; + try { + bundles = bitstream.getBundles(); + } catch (SQLException ex) { + logError("Unexpected error fetching Bundles", ex); + bundles = Collections.EMPTY_LIST; + } + StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n"); + sb.append("\tItem Handle: ").append(itemHandle); + for (Bundle bundle : bundles) { + sb.append("\tBundle Name: ").append(bundle.getName()); + } + sb.append("\tFile Size: ").append(bitstream.getSizeBytes()); + sb.append("\tChecksum: ").append(bitstream.getChecksum()) + .append(" (").append(bitstream.getChecksumAlgorithm()).append(')'); + sb.append("\tAsset Store: ").append(bitstream.getStoreNumber()); + sb.append("\tInternal ID: ").append(bitstream.getInternalId()); + return sb.toString(); + } + private void logInfo(String message) { if (handler != null) { handler.logInfo(message); From b5f2f67ead31281ebbe143e4d88657d101f976d6 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 8 Aug 2023 16:43:12 -0500 Subject: [PATCH 354/686] Remove useless log.info (cherry picked from commit 1f3ad993cc4d10694112227245be3de1ec7b3762) --- .../src/main/java/org/dspace/content/BundleServiceImpl.java | 1 - 1 file changed, 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 20c43e4bfc73..546d48d4306b 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -194,7 +194,6 @@ public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) List defaultBitstreamReadGroups = authorizeService.getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_BITSTREAM_READ); - log.info(defaultBitstreamReadGroups.size()); // If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy // inherited from the bundle with this policy. 
if (!defaultBitstreamReadGroups.isEmpty()) { From 22e209322c61bc1c7f940a1b686b87d90b5fd433 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Thu, 17 Aug 2023 16:43:21 +0200 Subject: [PATCH 355/686] 103837: Refactor GA config to list bundles --- .../google/GoogleAsyncEventListener.java | 30 +++++----- .../google/GoogleAsyncEventListenerIT.java | 56 ++++++++++++------- dspace/config/dspace.cfg | 12 ++-- 3 files changed, 60 insertions(+), 38 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java index e84d9f8591f2..cb9a120fd08f 100644 --- a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java +++ b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java @@ -23,6 +23,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Constants; import org.dspace.core.Context; @@ -176,26 +177,27 @@ private String getDocumentPath(HttpServletRequest request) { * Verifies if the usage event is a content bitstream view event, by checking if:
    *
 * <li>the usage event is a view event</li>
 * <li>the object of the usage event is a bitstream</li>
- * <li>the bitstream belongs to the ORIGINAL bundle</li></ul>
- * This last one can be skipped if 'google-analytics.exclude-non-content-bitstreams' is set to false.
- * This will make it so the bundle name is completely ignored when sending events.
+ * <li>the bitstream belongs to one of the configured bundles (fallback: ORIGINAL bundle)</li></ul>
  • */ private boolean isContentBitstream(UsageEvent usageEvent) { // check if event is a VIEW event and object is a Bitstream if (usageEvent.getAction() == UsageEvent.Action.VIEW || usageEvent.getObject().getType() == Constants.BITSTREAM) { - // check if config is set to true - if (configurationService.getBooleanProperty("google-analytics.exclude-non-content-bitstreams")) { - try { - // check if bitstream belongs to the ORIGINAL bundle - return ((Bitstream) usageEvent.getObject()) - .getBundles().stream() - .anyMatch(bundle -> bundle.getName().equals(Constants.CONTENT_BUNDLE_NAME)); - } catch (SQLException e) { - throw new RuntimeException(e.getMessage(), e); - } + // check if bitstream belongs to a configured bundle + List allowedBundles = List.of(configurationService + .getArrayProperty("google-analytics.bundles", new String[]{Constants.CONTENT_BUNDLE_NAME})); + if (allowedBundles.contains("none")) { + // GA events for bitstream views were turned off in config + return false; } - return true; + List bitstreamBundles; + try { + bitstreamBundles = ((Bitstream) usageEvent.getObject()) + .getBundles().stream().map(Bundle::getName).collect(Collectors.toList()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + return allowedBundles.stream().anyMatch(bitstreamBundles::contains); } return false; } diff --git a/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java b/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java index e43e9fd82035..17df839ebf1f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/google/GoogleAsyncEventListenerIT.java @@ -245,17 +245,12 @@ public void testOnBitstreamContentDownloadWithTooManyEvents() throws Exception { } @Test - public void testOnBitstreamContentDownloadExcludeNonContentBitstreams() throws Exception { - configurationService.setProperty("google-analytics.exclude-non-content-bitstreams", true); - + public void testOnBitstreamContentDownloadDefaultBundleConfig() throws Exception { context.turnOffAuthorisationSystem(); Bundle licenseBundle = BundleBuilder.createBundle(context, item) .withName(Constants.LICENSE_BUNDLE_NAME).build(); Bitstream license = BitstreamBuilder.createBitstream(context, licenseBundle, toInputStream("License", defaultCharset())).build(); - Bundle thumbnailBundle = BundleBuilder.createBundle(context, item).withName("THUMBNAIL").build(); - Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, - toInputStream("Thumbnail", defaultCharset())).build(); context.restoreAuthSystemState(); assertThat(getStoredEventsAsList(), empty()); @@ -264,14 +259,14 @@ public void testOnBitstreamContentDownloadExcludeNonContentBitstreams() throws E downloadBitstreamContent("Postman", "123456", "REF"); downloadContent("Chrome", "ABCDEFG", "REF-1", license); - downloadContent("Chrome", "987654", "REF-2", thumbnail); assertThat(getStoredEventsAsList(), hasSize(1)); List storedEvents = getStoredEventsAsList(); assertThat(storedEvents, contains( - event("123456", "127.0.0.1", "Postman", "REF", bitstreamUrl, "Test item"))); + event("123456", "127.0.0.1", "Postman", "REF", bitstreamUrl, "Test item")) + ); googleAsyncEventListener.sendCollectedEvents(); @@ -284,14 +279,14 @@ public void testOnBitstreamContentDownloadExcludeNonContentBitstreams() throws E } @Test - public void testOnBitstreamContentDownloadIncludeNonContentBitstreams() throws Exception 
{ - configurationService.setProperty("google-analytics.exclude-non-content-bitstreams", false); + public void testOnBitstreamContentDownloadMultipleBundleConfig() throws Exception { + configurationService.setProperty("google-analytics.bundles", + List.of(Constants.DEFAULT_BUNDLE_NAME, "CONTENT")); context.turnOffAuthorisationSystem(); - Bundle licenseBundle = BundleBuilder.createBundle(context, item) - .withName(Constants.LICENSE_BUNDLE_NAME).build(); - Bitstream license = BitstreamBuilder.createBitstream(context, licenseBundle, - toInputStream("License", defaultCharset())).build(); + Bundle contentBundle = BundleBuilder.createBundle(context, item).withName("CONTENT").build(); + Bitstream content = BitstreamBuilder.createBitstream(context, contentBundle, + toInputStream("Test Content", defaultCharset())).build(); Bundle thumbnailBundle = BundleBuilder.createBundle(context, item).withName("THUMBNAIL").build(); Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, toInputStream("Thumbnail", defaultCharset())).build(); @@ -300,21 +295,20 @@ public void testOnBitstreamContentDownloadIncludeNonContentBitstreams() throws E assertThat(getStoredEventsAsList(), empty()); String bitstreamUrl = "/api/core/bitstreams/" + bitstream.getID() + "/content"; - String licenseUrl = "/api/core/bitstreams/" + license.getID() + "/content"; - String thumbnailUrl = "/api/core/bitstreams/" + thumbnail.getID() + "/content"; + String contentUrl = "/api/core/bitstreams/" + content.getID() + "/content"; downloadBitstreamContent("Postman", "123456", "REF"); - downloadContent("Chrome", "ABCDEFG", "REF-1", license); + downloadContent("Chrome", "ABCDEFG", "REF-1", content); downloadContent("Chrome", "987654", "REF-2", thumbnail); - assertThat(getStoredEventsAsList(), hasSize(3)); + assertThat(getStoredEventsAsList(), hasSize(2)); List storedEvents = getStoredEventsAsList(); assertThat(storedEvents, contains( event("123456", "127.0.0.1", "Postman", "REF", bitstreamUrl, "Test item"), - event("ABCDEFG", "127.0.0.1", "Chrome", "REF-1", licenseUrl, "Test item"), - event("987654", "127.0.0.1", "Chrome", "REF-2", thumbnailUrl, "Test item"))); + event("ABCDEFG", "127.0.0.1", "Chrome", "REF-1", contentUrl, "Test item") + )); googleAsyncEventListener.sendCollectedEvents(); @@ -326,6 +320,28 @@ public void testOnBitstreamContentDownloadIncludeNonContentBitstreams() throws E verifyNoMoreInteractions(firstGaClientMock, secondGaClientMock); } + @Test + public void testOnBitstreamContentDownloadNoneBundleConfig() throws Exception { + configurationService.setProperty("google-analytics.bundles", "none"); + + context.turnOffAuthorisationSystem(); + Bundle contentBundle = BundleBuilder.createBundle(context, item).withName("CONTENT").build(); + Bitstream content = BitstreamBuilder.createBitstream(context, contentBundle, + toInputStream("Test Content", defaultCharset())).build(); + Bundle thumbnailBundle = BundleBuilder.createBundle(context, item).withName("THUMBNAIL").build(); + Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, + toInputStream("Thumbnail", defaultCharset())).build(); + context.restoreAuthSystemState(); + + assertThat(getStoredEventsAsList(), empty()); + + downloadBitstreamContent("Postman", "123456", "REF"); + downloadContent("Chrome", "ABCDEFG", "REF-1", content); + downloadContent("Chrome", "987654", "REF-2", thumbnail); + + assertThat(getStoredEventsAsList(), empty()); + } + @SuppressWarnings("unchecked") private List getStoredEventsAsList() { List events = new 
ArrayList<>(); diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 89c8da92558c..be11cbf03303 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1537,10 +1537,14 @@ log.report.dir = ${dspace.dir}/log # For more details see https://developers.google.com/analytics/devguides/collection/protocol/ga4 # google.analytics.api-secret = -# Ensures only views of bitstreams in the 'ORIGINAL' bundle result in a GA4 event. -# Setting this to false may cause inflated bitstream view numbers, since requesting -# bitstreams in the 'THUMBNAIL' and 'LICENSE' bundles, will also result in GA4 events. -google-analytics.exclude-non-content-bitstreams=true +# Ensures only views of bitstreams in configured bundles result in a GA4 event. +# Config can contain multiple bundles for which the bitstream views will result in GA4 events, eg: +# google-analytics.bundles = ORIGINAL, CONTENT +# If config is not set or empty, the default fallback is Constants#CONTENT_BUNDLE_NAME bundle ('ORIGINAL'). +# If config contains 'LICENSE' or 'THUMBNAIL' bundles, it may cause inflated bitstream view numbers. +# Set config to 'none' to disable GA4 bitstream events, eg: +# google-analytics.bundles = none +google-analytics.bundles = ORIGINAL #################################################################### #---------------------------------------------------------------# From e6829c7b428ac370a3648a30cd577c01832ac94b Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Mon, 21 Aug 2023 23:35:23 +0200 Subject: [PATCH 356/686] README.md: Fix typo (cherry picked from commit ca8abddff1230e581501b482623966e64016d609) --- dspace-server-webapp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/README.md b/dspace-server-webapp/README.md index 8d3853e8ccc7..d418124ea171 100644 --- a/dspace-server-webapp/README.md +++ b/dspace-server-webapp/README.md @@ -10,7 +10,7 @@ This webapp uses the following technologies: We don't use Spring Data REST as we haven't a spring data layer and we want to provide clear separation between the persistence representation and the REST representation ## How to contribute -Check the infomation available on the DSpace Official Wiki page for the [DSpace 7 Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) +Check the information available on the DSpace Official Wiki page for the [DSpace 7 Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) [DSpace 7 REST: Coding DSpace Objects](https://wiki.duraspace.org/display/DSPACE/DSpace+7+REST%3A+Coding+DSpace+Objects) From c08f7625257e84e99d34aace28b3b4e4c58d1128 Mon Sep 17 00:00:00 2001 From: Hrafn Malmquist Date: Fri, 4 Aug 2023 19:04:14 +0100 Subject: [PATCH 357/686] Bump up versions of buildnumber-maven-plugin & build-helper-maven-plugin. add configuration for SCM failure (cherry picked from commit 78ea9e86df5e6737a4ce129120a6e1e6c74a371c) --- Dockerfile | 2 +- dspace-api/pom.xml | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index f1ff6adf5ac5..9c32ecb50cd4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -50,7 +50,7 @@ RUN ant init_installation update_configs update_code update_webapps FROM tomcat:9-jdk${JDK_VERSION} # NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration. 
ENV DSPACE_INSTALL=/dspace -# Copy the /dspace directory from 'ant_build' containger to /dspace in this container +# Copy the /dspace directory from 'ant_build' container to /dspace in this container COPY --from=ant_build /dspace $DSPACE_INSTALL # Expose Tomcat port and AJP port EXPOSE 8080 8009 diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index c05546d56944..ec9c423a9ea7 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -102,7 +102,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.0.0 + 3.4.0 validate @@ -116,7 +116,10 @@ org.codehaus.mojo buildnumber-maven-plugin - 1.4 + 3.2.0 + + UNKNOWN_REVISION + validate From 79e47837ae145d08f790274ff5384c4ba6b1e3de Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Fri, 11 Aug 2023 08:47:31 +0200 Subject: [PATCH 358/686] 3331 - remove the --optimize feature of 'dspace stats-util' (cherry picked from commit 08c650921d7ce5210906db846ff29a053c2155a7) --- .../dspace/statistics/SolrLoggerServiceImpl.java | 16 ---------------- .../statistics/service/SolrLoggerService.java | 6 ------ .../dspace/statistics/util/StatisticsClient.java | 3 --- 3 files changed, 25 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index 19c79af34dcc..97585f5a47cb 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -1203,22 +1203,6 @@ public String getIgnoreSpiderIPs() { } - @Override - public void optimizeSOLR() { - try { - long start = System.currentTimeMillis(); - System.out.println("SOLR Optimize -- Process Started:" + start); - solr.optimize(); - long finish = System.currentTimeMillis(); - System.out.println("SOLR Optimize -- Process Finished:" + finish); - System.out.println("SOLR Optimize -- Total time taken:" + (finish - start) + " (ms)."); - } catch (SolrServerException sse) { - System.err.println(sse.getMessage()); - } catch (IOException ioe) { - System.err.println(ioe.getMessage()); - } - } - @Override public void shardSolrIndex() throws IOException, SolrServerException { if (!(solr instanceof HttpSolrClient)) { diff --git a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java index 3728318625e3..61b2bb6013de 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java +++ b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java @@ -266,12 +266,6 @@ public QueryResponse query(String query, String filterQuery, */ public String getIgnoreSpiderIPs(); - /** - * Maintenance to keep a SOLR index efficient. - * Note: This might take a long time. 
- */ - public void optimizeSOLR(); - public void shardSolrIndex() throws IOException, SolrServerException; public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception; diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java index e45ce163ed77..319fe437d648 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java +++ b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java @@ -67,7 +67,6 @@ public static void main(String[] args) throws Exception { options.addOption("m", "mark-spiders", false, "Update isBot Flag in Solr"); options.addOption("f", "delete-spiders-by-flag", false, "Delete Spiders in Solr By isBot Flag"); options.addOption("i", "delete-spiders-by-ip", false, "Delete Spiders in Solr By IP Address"); - options.addOption("o", "optimize", false, "Run maintenance on the SOLR index"); options.addOption("b", "reindex-bitstreams", false, "Reindex the bitstreams to ensure we have the bundle name"); options.addOption("e", "export", false, "Export SOLR view statistics data to usage-statistics-intermediate-format"); @@ -93,8 +92,6 @@ public static void main(String[] args) throws Exception { solrLoggerService.deleteRobotsByIsBotFlag(); } else if (line.hasOption('i')) { solrLoggerService.deleteRobotsByIP(); - } else if (line.hasOption('o')) { - solrLoggerService.optimizeSOLR(); } else if (line.hasOption('b')) { solrLoggerService.reindexBitstreamHits(line.hasOption('r')); } else if (line.hasOption('e')) { From ae2c253082c5952ecebe94ed759d86d38bafc455 Mon Sep 17 00:00:00 2001 From: Christian Bethge Date: Mon, 7 Aug 2023 09:42:07 +0200 Subject: [PATCH 359/686] fix MissingOptionException on help (cherry picked from commit 8ae5ffbf9b41fe2ad4e1146eceeff47e6ba985a0) --- .../dspace/app/launcher/ScriptLauncher.java | 14 ++++- .../org/dspace/scripts/DSpaceRunnable.java | 55 +++++++++++++++++-- .../configuration/ScriptConfiguration.java | 15 +++++ .../dspace/app/bulkedit/MetadataExportIT.java | 15 +++-- .../dspace/app/bulkedit/MetadataImportIT.java | 5 +- .../app/csv/CSVMetadataImportReferenceIT.java | 5 +- .../java/org/dspace/curate/CurationIT.java | 10 ++-- 7 files changed, 98 insertions(+), 21 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java index fcb2098bd066..e6df016613b9 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java @@ -21,6 +21,7 @@ import org.apache.logging.log4j.Logger; import org.dspace.core.Context; import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.DSpaceRunnable.StepResult; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -145,9 +146,16 @@ public static int handleScript(String[] args, Document commandConfigs, private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, DSpaceRunnable script) { try { - script.initialize(args, dSpaceRunnableHandler, null); - script.run(); - return 0; + StepResult result = script.initialize(args, dSpaceRunnableHandler, null); + + if (StepResult.Continue.equals(result)) { + // only run the script, if the normal initialize is successful + script.run(); + } else { + // otherwise - for 
example the script is started with the help argument + } + + return 0; } catch (ParseException e) { script.printHelp(); e.printStackTrace(); diff --git a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java index 2319aee31752..4f64f6973179 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java +++ b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java @@ -35,6 +35,11 @@ public abstract class DSpaceRunnable implements R * The CommandLine object for the script that'll hold the information */ protected CommandLine commandLine; + + /** + * The minimal CommandLine object for the script that'll hold help information + */ + protected CommandLine helpCommandLine; /** * This EPerson identifier variable is the UUID of the EPerson that's running the script @@ -64,27 +69,64 @@ private void setHandler(DSpaceRunnableHandler dSpaceRunnableHandler) { * @param args The arguments given to the script * @param dSpaceRunnableHandler The DSpaceRunnableHandler object that defines from where the script was ran * @param currentUser + * @return the result of this step; StepResult.Continue: continue the normal process, initialize is successful; + * otherwise exit the process (the help or version is shown) * @throws ParseException If something goes wrong */ - public void initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, + public StepResult initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, EPerson currentUser) throws ParseException { if (currentUser != null) { this.setEpersonIdentifier(currentUser.getID()); } this.setHandler(dSpaceRunnableHandler); - this.parse(args); + + // parse the command line in a first step for the help options + // --> no other option is required + StepResult result = this.parseForHelp(args); + switch (result) { + case Exit: + // arguments of the command line matches the help options, handle this + handleHelpCommandLine(); + break; + + case Continue: + // arguments of the command line matches NOT the help options, parse the args for the normal options + result = this.parse(args); + break; + } + + return result; } - /** + + /** This method handle the help command line. In this easy implementation only the help is printed. + * For more complexity override this method. 
+ */ + private void handleHelpCommandLine() { + printHelp(); + } + + + /** * This method will take the primitive array of String objects that represent the parameters given to the String * and it'll parse these into a CommandLine object that can be used by the script to retrieve the data * @param args The primitive array of Strings representing the parameters * @throws ParseException If something goes wrong */ - private void parse(String[] args) throws ParseException { + private StepResult parse(String[] args) throws ParseException { commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args); setup(); + return StepResult.Continue; } + + private StepResult parseForHelp(String[] args) throws ParseException { + helpCommandLine = new DefaultParser().parse(getScriptConfiguration().getHelpOptions(), args); + if (helpCommandLine.getOptions() != null && helpCommandLine.getOptions().length > 0) { + return StepResult.Exit; + } + + return StepResult.Continue; + } /** * This method has to be included in every script and handles the setup of the script by parsing the CommandLine @@ -158,4 +200,9 @@ public UUID getEpersonIdentifier() { public void setEpersonIdentifier(UUID epersonIdentifier) { this.epersonIdentifier = epersonIdentifier; } + + public enum StepResult { + Continue, + Exit; + } } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 642409a924f7..62f30f99f65a 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -10,6 +10,7 @@ import java.sql.SQLException; import java.util.List; +import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; @@ -104,6 +105,20 @@ public boolean isAllowedToExecute(Context context, List Date: Mon, 7 Aug 2023 11:27:08 +0200 Subject: [PATCH 360/686] fix stylecheck (cherry picked from commit b1377ca1ef82d80f2ece9b48b8f1571e786c4525) --- .../dspace/app/launcher/ScriptLauncher.java | 10 ++- .../org/dspace/scripts/DSpaceRunnable.java | 70 ++++++++++--------- .../configuration/ScriptConfiguration.java | 11 ++- .../dspace/app/bulkedit/MetadataExportIT.java | 18 ++--- .../dspace/app/bulkedit/MetadataImportIT.java | 6 +- .../app/csv/CSVMetadataImportReferenceIT.java | 7 +- .../java/org/dspace/curate/CurationIT.java | 12 ++-- 7 files changed, 67 insertions(+), 67 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java index e6df016613b9..bcb61a48ee76 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java @@ -147,15 +147,13 @@ private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunn DSpaceRunnable script) { try { StepResult result = script.initialize(args, dSpaceRunnableHandler, null); - if (StepResult.Continue.equals(result)) { - // only run the script, if the normal initialize is successful - script.run(); + // only run the script, if the normal initialize is successful + script.run(); } else { - // otherwise - for example the script is started with the help argument + // otherwise - for example the script is started with the help argument } - - return 
0; + return 0; } catch (ParseException e) { script.printHelp(); e.printStackTrace(); diff --git a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java index 4f64f6973179..7fb8567f8cd1 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java +++ b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java @@ -35,7 +35,7 @@ public abstract class DSpaceRunnable implements R * The CommandLine object for the script that'll hold the information */ protected CommandLine commandLine; - + /** * The minimal CommandLine object for the script that'll hold help information */ @@ -69,8 +69,8 @@ private void setHandler(DSpaceRunnableHandler dSpaceRunnableHandler) { * @param args The arguments given to the script * @param dSpaceRunnableHandler The DSpaceRunnableHandler object that defines from where the script was ran * @param currentUser - * @return the result of this step; StepResult.Continue: continue the normal process, initialize is successful; - * otherwise exit the process (the help or version is shown) + * @return the result of this step; StepResult.Continue: continue the normal process, + * initialize is successful; otherwise exit the process (the help or version is shown) * @throws ParseException If something goes wrong */ public StepResult initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, @@ -79,35 +79,38 @@ public StepResult initialize(String[] args, DSpaceRunnableHandler dSpaceRunnable this.setEpersonIdentifier(currentUser.getID()); } this.setHandler(dSpaceRunnableHandler); - - // parse the command line in a first step for the help options + + // parse the command line in a first step for the help options // --> no other option is required StepResult result = this.parseForHelp(args); switch (result) { - case Exit: - // arguments of the command line matches the help options, handle this - handleHelpCommandLine(); - break; - - case Continue: - // arguments of the command line matches NOT the help options, parse the args for the normal options - result = this.parse(args); - break; - } - + case Exit: + // arguments of the command line matches the help options, handle this + handleHelpCommandLine(); + break; + + case Continue: + // arguments of the command line matches NOT the help options, parse the args for the normal options + result = this.parse(args); + break; + default: + break; + } + return result; } - /** This method handle the help command line. In this easy implementation only the help is printed. - * For more complexity override this method. - */ - private void handleHelpCommandLine() { - printHelp(); - } + /** + * This method handle the help command line. In this easy implementation only the help is printed. For more + * complexity override this method. 
+ */ + private void handleHelpCommandLine() { + printHelp(); + } - /** + /** * This method will take the primitive array of String objects that represent the parameters given to the String * and it'll parse these into a CommandLine object that can be used by the script to retrieve the data * @param args The primitive array of Strings representing the parameters @@ -118,15 +121,15 @@ private StepResult parse(String[] args) throws ParseException { setup(); return StepResult.Continue; } - + private StepResult parseForHelp(String[] args) throws ParseException { - helpCommandLine = new DefaultParser().parse(getScriptConfiguration().getHelpOptions(), args); - if (helpCommandLine.getOptions() != null && helpCommandLine.getOptions().length > 0) { - return StepResult.Exit; - } - - return StepResult.Continue; - } + helpCommandLine = new DefaultParser().parse(getScriptConfiguration().getHelpOptions(), args); + if (helpCommandLine.getOptions() != null && helpCommandLine.getOptions().length > 0) { + return StepResult.Exit; + } + + return StepResult.Continue; + } /** * This method has to be included in every script and handles the setup of the script by parsing the CommandLine @@ -200,9 +203,8 @@ public UUID getEpersonIdentifier() { public void setEpersonIdentifier(UUID epersonIdentifier) { this.epersonIdentifier = epersonIdentifier; } - + public enum StepResult { - Continue, - Exit; + Continue, Exit; } } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 62f30f99f65a..bbedab04e278 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -105,17 +105,16 @@ public boolean isAllowedToExecute(Context context, List Date: Tue, 8 Aug 2023 09:36:16 +0200 Subject: [PATCH 361/686] ingore unrecognized arguments on help (cherry picked from commit 82c9b6fc9baee9f4f5d8b4cc967b5d12b63cdd39) --- dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java index 7fb8567f8cd1..5e16fea5aecd 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java +++ b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java @@ -123,7 +123,7 @@ private StepResult parse(String[] args) throws ParseException { } private StepResult parseForHelp(String[] args) throws ParseException { - helpCommandLine = new DefaultParser().parse(getScriptConfiguration().getHelpOptions(), args); + helpCommandLine = new DefaultParser().parse(getScriptConfiguration().getHelpOptions(), args, true); if (helpCommandLine.getOptions() != null && helpCommandLine.getOptions().length > 0) { return StepResult.Exit; } From 8045f6a01486e3101605f92910425e824a031666 Mon Sep 17 00:00:00 2001 From: Christian Bethge Date: Wed, 23 Aug 2023 10:33:51 +0200 Subject: [PATCH 362/686] add DSpaceSkipUnknownArgumentsParser as parser to ignore/skip unknown arguments in cli by help, fix not necessary ParseException in help (cherry picked from commit 86285d78aa0844b9811dcebdefa897ceeb944226) --- .../cli/DSpaceSkipUnknownArgumentsParser.java | 77 +++++++++++++++++++ .../org/dspace/scripts/DSpaceRunnable.java | 3 +- 2 files changed, 79 insertions(+), 1 deletion(-) create mode 100644 
dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java diff --git a/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java b/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java new file mode 100644 index 000000000000..afd74a588d17 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java @@ -0,0 +1,77 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.cli; + +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; + +/** + * Extended version of the DefaultParser. This parser skip/ignore unknown arguments. + */ +public class DSpaceSkipUnknownArgumentsParser extends DefaultParser { + + + @Override + public CommandLine parse(Options options, String[] arguments) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments)); + } + + @Override + public CommandLine parse(Options options, String[] arguments, Properties properties) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), properties); + } + + /** + * Parse the arguments according to the specified options and properties. + * @param options the specified Options + * @param arguments the command line arguments + * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't + * stop the parsing and doesn't trigger a ParseException + * + * @return the list of atomic option and value tokens + * @throws ParseException if there are any problems encountered while parsing the command line tokens. + */ + @Override + public CommandLine parse(Options options, String[] arguments, boolean stopAtNonOption) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), stopAtNonOption); + } + + /** + * Parse the arguments according to the specified options and properties. + * @param options the specified Options + * @param arguments the command line arguments + * @param properties command line option name-value pairs + * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't + * stop the parsing and doesn't trigger a ParseException + * + * @return the list of atomic option and value tokens + * @throws ParseException if there are any problems encountered while parsing the command line tokens. 
+ */ + @Override + public CommandLine parse(Options options, String[] arguments, Properties properties, boolean stopAtNonOption) + throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), properties, stopAtNonOption); + } + + + private String[] getOnlyKnownArguments(Options options, String[] arguments) { + List knownArguments = new ArrayList<>(); + for (String arg : arguments) { + if (options.hasOption(arg)) { + knownArguments.add(arg); + } + } + return knownArguments.toArray(new String[0]); + } +} diff --git a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java index 5e16fea5aecd..2ea0a52d6e34 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java +++ b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java @@ -18,6 +18,7 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.StringUtils; +import org.dspace.cli.DSpaceSkipUnknownArgumentsParser; import org.dspace.eperson.EPerson; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -123,7 +124,7 @@ private StepResult parse(String[] args) throws ParseException { } private StepResult parseForHelp(String[] args) throws ParseException { - helpCommandLine = new DefaultParser().parse(getScriptConfiguration().getHelpOptions(), args, true); + helpCommandLine = new DSpaceSkipUnknownArgumentsParser().parse(getScriptConfiguration().getHelpOptions(), args); if (helpCommandLine.getOptions() != null && helpCommandLine.getOptions().length > 0) { return StepResult.Exit; } From 7f1875c945a7a140a790b00db8c009bdcdf80492 Mon Sep 17 00:00:00 2001 From: Christian Bethge Date: Wed, 23 Aug 2023 10:41:44 +0200 Subject: [PATCH 363/686] remove not necessary else (cherry picked from commit 064e2caa37dfa283c3c08dee0e7321e36073bfa2) --- .../main/java/org/dspace/app/launcher/ScriptLauncher.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java index bcb61a48ee76..89a416bfa883 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java @@ -147,11 +147,11 @@ private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunn DSpaceRunnable script) { try { StepResult result = script.initialize(args, dSpaceRunnableHandler, null); + // check the StepResult, only run the script if the result is Continue; + // otherwise - for example the script is started with the help as argument, nothing is to do if (StepResult.Continue.equals(result)) { - // only run the script, if the normal initialize is successful + // runs the script, the normal initialization is successful script.run(); - } else { - // otherwise - for example the script is started with the help argument } return 0; } catch (ParseException e) { From bd1eb00d2d3f7649dc1e3361a4965dbedb945af1 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 23 Aug 2023 16:51:26 -0500 Subject: [PATCH 364/686] Enable new skip merge commit feature --- .github/workflows/port_merged_pull_request.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/port_merged_pull_request.yml b/.github/workflows/port_merged_pull_request.yml index 50faf3f88679..109835d14d3c 100644 --- 
a/.github/workflows/port_merged_pull_request.yml +++ b/.github/workflows/port_merged_pull_request.yml @@ -39,6 +39,8 @@ jobs: # Copy all labels from original PR to (newly created) port PR # NOTE: The labels matching 'label_pattern' are automatically excluded copy_labels_pattern: '.*' + # Skip any merge commits in the ported PR. This means only non-merge commits are cherry-picked to the new PR + merge_commits: 'skip' # Use a personal access token (PAT) to create PR as 'dspace-bot' user. # A PAT is required in order for the new PR to trigger its own actions (for CI checks) github_token: ${{ secrets.PR_PORT_TOKEN }} \ No newline at end of file From 1fc8fc637c90154a6c01fc944c7edc03bb278d6a Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 24 Aug 2023 10:57:16 +0300 Subject: [PATCH 365/686] dspace-api: remove unnecessary trailing whitespace --- .../org/dspace/scripts/configuration/ScriptConfiguration.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index bbedab04e278..ec8e3632cfe3 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -108,7 +108,7 @@ public boolean isAllowedToExecute(Context context, List Date: Fri, 4 Aug 2023 11:39:35 +0200 Subject: [PATCH 366/686] 103818 ItemServiceImpl#inheritCollectionDefaultPolicies now clears item READ policies if new parent collection has a default READ policy --- .../org/dspace/content/ItemServiceImpl.java | 8 +++ .../content/service/ItemServiceTest.java | 54 +++++++++++++++++++ 2 files changed, 62 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 8d1ba14b2c4a..3458361f43e6 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -920,6 +920,14 @@ public void removeGroupPolicies(Context context, Item item, Group group) throws @Override public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { + + // If collection has READ policies, remove the item's READ policies. 
+ List defaultCollectionPolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); + if (!defaultCollectionPolicies.isEmpty()) { + authorizeService.removePoliciesActionFilter(context, item, Constants.READ); + } + adjustItemPolicies(context, item, collection); adjustBundleBitstreamPolicies(context, item, collection); diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java index 50b4d3f3b48e..1847a27c7f5a 100644 --- a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java @@ -26,6 +26,8 @@ import org.dspace.app.requestitem.RequestItem; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; @@ -48,6 +50,8 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Constants; import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; import org.dspace.versioning.Version; import org.dspace.versioning.factory.VersionServiceFactory; import org.dspace.versioning.service.VersioningService; @@ -68,6 +72,8 @@ public class ItemServiceTest extends AbstractIntegrationTestWithDatabase { protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); protected MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); protected VersioningService versioningService = VersionServiceFactory.getInstance().getVersionService(); + protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); Community community; Collection collection1; @@ -752,6 +758,54 @@ public void testRemoveItemThatHasRequests() throws Exception { assertNull(itemService.find(context, item.getID())); } + + @Test + public void testMoveItemToCollectionWithMoreRestrictiveReadPolicy() throws Exception { + /* Verify that, if we move an item from a collection with a permissive default READ policy + * to a collection with a restrictive default READ policy, + * that the item does not retain the original permissive READ policy. + */ + + context.turnOffAuthorisationSystem(); + + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + Group admin = groupService.findByName(context, Group.ADMIN); + + // Set up the two different collections: one permissive and one restrictive in its default READ policy. + Collection permissive = CollectionBuilder + .createCollection(context, community) + .build(); + Collection restrictive = CollectionBuilder + .createCollection(context, community) + .build(); + authorizeService.removePoliciesActionFilter(context, restrictive, Constants.DEFAULT_ITEM_READ); + authorizeService.addPolicy(context, restrictive, Constants.DEFAULT_ITEM_READ, admin); + + // Add an item to the permissive collection. + Item item = ItemBuilder + .createItem(context, permissive) + .build(); + + // Verify that the item has exactly one READ policy, for the anonymous group. 
+ assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + // Move the item to the restrictive collection, making sure to inherit default policies. + itemService.move(context, item, permissive, restrictive, true); + + // Verify that the item has exactly one READ policy, but now for the admin group. + assertEquals( + List.of(admin), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + context.restoreAuthSystemState(); + } + private void assertMetadataValue(String authorQualifier, String contributorElement, String dcSchema, String value, String authority, int place, MetadataValue metadataValue) { assertThat(metadataValue.getValue(), equalTo(value)); From 03961316dd9f0dd004a293ccfdac52853e27269a Mon Sep 17 00:00:00 2001 From: Koen Pauwels Date: Fri, 4 Aug 2023 15:34:14 +0200 Subject: [PATCH 367/686] 103818 Add boolean parameter to ItemServiceImpl#inheritCollectionDefaultPolicies to decide whether to override item read policies --- .../content/InstallItemServiceImpl.java | 2 +- .../org/dspace/content/ItemServiceImpl.java | 17 ++++++++++---- .../dspace/content/service/ItemService.java | 22 ++++++++++++++++++- 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java index 32c5b92c605b..b52043e2673c 100644 --- a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java @@ -93,7 +93,7 @@ public Item installItem(Context c, InProgressSubmission is, // As this is a BRAND NEW item, as a final step we need to remove the // submitter item policies created during deposit and replace them with // the default policies from the collection. - itemService.inheritCollectionDefaultPolicies(c, item, collection); + itemService.inheritCollectionDefaultPolicies(c, item, collection, false); return item; } diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 3458361f43e6..663585034d12 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -920,12 +920,21 @@ public void removeGroupPolicies(Context context, Item item, Group group) throws @Override public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { + inheritCollectionDefaultPolicies(context, item, collection, true); + } + + @Override + public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection, + boolean overrideItemReadPolicies) + throws SQLException, AuthorizeException { // If collection has READ policies, remove the item's READ policies. 
- List defaultCollectionPolicies = authorizeService - .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); - if (!defaultCollectionPolicies.isEmpty()) { - authorizeService.removePoliciesActionFilter(context, item, Constants.READ); + if (overrideItemReadPolicies) { + List defaultCollectionPolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); + if (!defaultCollectionPolicies.isEmpty()) { + authorizeService.removePoliciesActionFilter(context, item, Constants.READ); + } } adjustItemPolicies(context, item, collection); diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index b6bf7aa5cfa2..8b93e953eb81 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -473,7 +473,7 @@ public void replaceAllBitstreamPolicies(Context context, Item item, List Date: Fri, 4 Aug 2023 16:38:30 +0200 Subject: [PATCH 368/686] 103818 Add boolean parameters to ItemServiceImpl methodds to decide whether to override read policies --- .../org/dspace/content/ItemServiceImpl.java | 47 +++++++--- .../dspace/content/service/ItemService.java | 93 ++++++++++++++++--- 2 files changed, 113 insertions(+), 27 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 663585034d12..ac38f0cca49c 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -925,20 +925,11 @@ public void inheritCollectionDefaultPolicies(Context context, Item item, Collect @Override public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection, - boolean overrideItemReadPolicies) + boolean replaceReadRPWithCollectionRP) throws SQLException, AuthorizeException { - // If collection has READ policies, remove the item's READ policies. 
- if (overrideItemReadPolicies) { - List defaultCollectionPolicies = authorizeService - .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); - if (!defaultCollectionPolicies.isEmpty()) { - authorizeService.removePoliciesActionFilter(context, item, Constants.READ); - } - } - - adjustItemPolicies(context, item, collection); - adjustBundleBitstreamPolicies(context, item, collection); + adjustItemPolicies(context, item, collection, replaceReadRPWithCollectionRP); + adjustBundleBitstreamPolicies(context, item, collection, replaceReadRPWithCollectionRP); log.debug(LogHelper.getHeader(context, "item_inheritCollectionDefaultPolicies", "item_id=" + item.getID())); @@ -947,6 +938,13 @@ public void inheritCollectionDefaultPolicies(Context context, Item item, Collect @Override public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { + adjustBundleBitstreamPolicies(context, item, collection, true); + } + + @Override + public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files // can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other // policies or embargos applied @@ -969,6 +967,10 @@ public void adjustBundleBitstreamPolicies(Context context, Item item, Collection // Remove bundles List bunds = item.getBundles(); for (Bundle mybundle : bunds) { + // If collection has default READ policies, remove the bitstream's READ policies. + if (replaceReadRPWithCollectionRP && defaultCollectionBitstreamPolicies.size() > 0) { + authorizeService.removePoliciesActionFilter(context, item, Constants.READ); + } // if come from InstallItem: remove all submission/workflow policies authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION); @@ -985,7 +987,14 @@ public void adjustBundleBitstreamPolicies(Context context, Item item, Collection } @Override - public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream) + public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream) + throws SQLException, AuthorizeException { + adjustBitstreamPolicies(context, item, collection, bitstream, true); + } + + @Override + public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream, + boolean replaceReadRPWithCollectionRP) throws SQLException, AuthorizeException { List defaultCollectionPolicies = authorizeService .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); @@ -1015,10 +1024,22 @@ private void removeAllPoliciesAndAddDefault(Context context, Bitstream bitstream @Override public void adjustItemPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { + adjustItemPolicies(context, item, collection, true); + } + + @Override + public void adjustItemPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { // read collection's default READ policies List defaultCollectionPolicies = authorizeService .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); + // If collection has defaultREAD policies, remove the item's READ policies. 
+ if (replaceReadRPWithCollectionRP && defaultCollectionPolicies.size() > 0) { + authorizeService.removePoliciesActionFilter(context, item, Constants.READ); + } + // MUST have default policies if (defaultCollectionPolicies.size() < 1) { throw new SQLException("Collection " + collection.getID() diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index 8b93e953eb81..de7644af83fe 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -527,6 +527,28 @@ public void inheritCollectionDefaultPolicies(Context context, Item item, Collect public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException; + /** + * Adjust the Bundle and Bitstream policies to reflect what have been defined + * during the submission/workflow. The temporary SUBMISSION and WORKFLOW + * policies are removed and the policies defined at the item and collection + * level are copied and inherited as appropriate. Custom selected Item policies + * are copied to the bundle/bitstream only if no explicit custom policies were + * already applied to the bundle/bitstream. Collection's policies are inherited + * if there are no other policies defined or if the append mode is defined by + * the configuration via the core.authorization.installitem.inheritance-read.append-mode property + * + * @param context DSpace context object + * @param item Item to adjust policies on + * @param collection Collection + * @param replaceReadRPWithCollectionRP if true, all read policies on the item are replaced (but only if the + * collection has a default read policy) + * @throws SQLException If database error + * @throws AuthorizeException If authorization error + */ + public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException; + /** * Adjust the Bitstream policies to reflect what have been defined * during the submission/workflow. The temporary SUBMISSION and WORKFLOW @@ -547,6 +569,29 @@ public void adjustBundleBitstreamPolicies(Context context, Item item, Collection public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream) throws SQLException, AuthorizeException; + /** + * Adjust the Bitstream policies to reflect what have been defined + * during the submission/workflow. The temporary SUBMISSION and WORKFLOW + * policies are removed and the policies defined at the item and collection + * level are copied and inherited as appropriate. Custom selected Item policies + * are copied to the bitstream only if no explicit custom policies were + * already applied to the bitstream. 
Collection's policies are inherited + * if there are no other policies defined or if the append mode is defined by + * the configuration via the core.authorization.installitem.inheritance-read.append-mode property + * + * @param context DSpace context object + * @param item Item to adjust policies on + * @param collection Collection + * @param bitstream Bitstream to adjust policies on + * @param replaceReadRPWithCollectionRP If true, all read policies on the bitstream are replaced (but only if the + * collection has a default read policy) + * @throws SQLException If database error + * @throws AuthorizeException If authorization error + */ + public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException; + /** * Adjust the Item's policies to reflect what have been defined during the @@ -565,6 +610,26 @@ public void adjustBitstreamPolicies(Context context, Item item, Collection colle public void adjustItemPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException; + /** + * Adjust the Item's policies to reflect what have been defined during the + * submission/workflow. The temporary SUBMISSION and WORKFLOW policies are + * removed and the default policies defined at the collection level are + * inherited as appropriate. Collection's policies are inherited if there are no + * other policies defined or if the append mode is defined by the configuration + * via the core.authorization.installitem.inheritance-read.append-mode property + * + * @param context DSpace context object + * @param item Item to adjust policies on + * @param collection Collection + * @param replaceReadRPWithCollectionRP If true, all read policies on the item are replaced (but only if the + * collection has a default read policy) + * @throws SQLException If database error + * @throws AuthorizeException If authorization error + */ + public void adjustItemPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException; + /** * Moves the item from one collection to another one * @@ -810,24 +875,24 @@ public Iterator findByLastModifiedSince(Context context, Date last) int countWithdrawnItems(Context context) throws SQLException; /** - * finds all items for which the current user has editing rights - * @param context DSpace context object - * @param offset page offset - * @param limit page size limit - * @return list of items for which the current user has editing rights - * @throws SQLException - * @throws SearchServiceException - */ + * finds all items for which the current user has editing rights + * @param context DSpace context object + * @param offset page offset + * @param limit page size limit + * @return list of items for which the current user has editing rights + * @throws SQLException + * @throws SearchServiceException + */ public List findItemsWithEdit(Context context, int offset, int limit) throws SQLException, SearchServiceException; /** - * counts all items for which the current user has editing rights - * @param context DSpace context object - * @return list of items for which the current user has editing rights - * @throws SQLException - * @throws SearchServiceException - */ + * counts all items for which the current user has editing rights + * @param context DSpace context object + * @return list of items for which the current user has editing rights + * @throws 
SQLException + * @throws SearchServiceException + */ public int countItemsWithEdit(Context context) throws SQLException, SearchServiceException; /** From 502230b271c3fc67470d9cdfe2905d73e2c5f4fd Mon Sep 17 00:00:00 2001 From: Koen Pauwels Date: Fri, 4 Aug 2023 17:00:35 +0200 Subject: [PATCH 369/686] 103818 Extend ItemServiceTest#testMoveItemToCollectionWithMoreRestrictiveReadPolicy --- .../org/dspace/content/ItemServiceImpl.java | 16 ++++++++-- .../content/service/ItemServiceTest.java | 30 +++++++++++++++++-- 2 files changed, 41 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index ac38f0cca49c..ebea2aa5b820 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -963,13 +963,18 @@ public void adjustBundleBitstreamPolicies(Context context, Item item, Collection } // TODO: should we also throw an exception if no DEFAULT_ITEM_READ? + boolean removeCurrentReadRPBitstream = + replaceReadRPWithCollectionRP && defaultCollectionBitstreamPolicies.size() > 0; + boolean removeCurrentReadRPBundle = + replaceReadRPWithCollectionRP && defaultCollectionBundlePolicies.size() > 0; + // remove all policies from bundles, add new ones // Remove bundles List bunds = item.getBundles(); for (Bundle mybundle : bunds) { - // If collection has default READ policies, remove the bitstream's READ policies. - if (replaceReadRPWithCollectionRP && defaultCollectionBitstreamPolicies.size() > 0) { - authorizeService.removePoliciesActionFilter(context, item, Constants.READ); + // If collection has default READ policies, remove the bundle's READ policies. + if (removeCurrentReadRPBundle) { + authorizeService.removePoliciesActionFilter(context, mybundle, Constants.READ); } // if come from InstallItem: remove all submission/workflow policies @@ -979,6 +984,11 @@ public void adjustBundleBitstreamPolicies(Context context, Item item, Collection addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies); for (Bitstream bitstream : mybundle.getBitstreams()) { + // If collection has default READ policies, remove the bundle's READ policies. + if (removeCurrentReadRPBitstream) { + authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ); + } + // if come from InstallItem: remove all submission/workflow policies removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, defaultCollectionBitstreamPolicies); diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java index 1847a27c7f5a..18e0047599ce 100644 --- a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java @@ -39,6 +39,7 @@ import org.dspace.builder.RequestItemBuilder; import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.EntityType; @@ -786,22 +787,47 @@ public void testMoveItemToCollectionWithMoreRestrictiveReadPolicy() throws Excep .createItem(context, permissive) .build(); - // Verify that the item has exactly one READ policy, for the anonymous group. 
+ Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, InputStream.nullInputStream()) + .build(); + + Bundle bundle = item.getBundles("ORIGINAL").get(0); + + // Verify that the item, bundle and bitstream each have exactly one READ policy, for the anonymous group. assertEquals( List.of(anonymous), authorizeService.getPoliciesActionFilter(context, item, Constants.READ) .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); // Move the item to the restrictive collection, making sure to inherit default policies. itemService.move(context, item, permissive, restrictive, true); - // Verify that the item has exactly one READ policy, but now for the admin group. + // Verify that the item, bundle and bitstream each have exactly one READ policy, but now for the admin group. assertEquals( List.of(admin), authorizeService.getPoliciesActionFilter(context, item, Constants.READ) .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); context.restoreAuthSystemState(); } From 54f174da045964730bb9cfb399c14b8539a583ab Mon Sep 17 00:00:00 2001 From: Koen Pauwels Date: Tue, 8 Aug 2023 11:45:44 +0200 Subject: [PATCH 370/686] 104878 Fix error in ItemServiceTest related to inheriting collection policies upon item move --- .../content/service/ItemServiceTest.java | 85 +++++++++++++++++-- 1 file changed, 80 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java index 18e0047599ce..16d78a8e3e45 100644 --- a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java @@ -761,10 +761,11 @@ public void testRemoveItemThatHasRequests() throws Exception { } @Test - public void testMoveItemToCollectionWithMoreRestrictiveReadPolicy() throws Exception { - /* Verify that, if we move an item from a collection with a permissive default READ policy - * to a collection with a restrictive default READ policy, - * that the item does not retain the original permissive READ policy. + public void testMoveItemToCollectionWithMoreRestrictiveItemReadPolicy() throws Exception { + /* Verify that, if we move an item from a collection with a permissive default item READ policy + * to a collection with a restrictive default item READ policy, + * that the item does not retain the original permissive item READ policy. + * However, its bundles and bitstreams do. */ context.turnOffAuthorisationSystem(); @@ -812,7 +813,7 @@ public void testMoveItemToCollectionWithMoreRestrictiveReadPolicy() throws Excep // Move the item to the restrictive collection, making sure to inherit default policies. 
itemService.move(context, item, permissive, restrictive, true); - // Verify that the item, bundle and bitstream each have exactly one READ policy, but now for the admin group. + // Verify that the item's read policy now only allows administrators. assertEquals( List.of(admin), authorizeService.getPoliciesActionFilter(context, item, Constants.READ) @@ -832,6 +833,80 @@ public void testMoveItemToCollectionWithMoreRestrictiveReadPolicy() throws Excep context.restoreAuthSystemState(); } + @Test + public void testMoveItemToCollectionWithMoreRestrictiveBitstreamReadPolicy() throws Exception { + /* Verify that, if we move an item from a collection with a permissive default bitstream READ policy + * to a collection with a restrictive default bitstream READ policy, + * that the item's bundles and bitstreams do not retain the original permissive READ policy. + * However, the item itself does retain the original policy. + */ + + context.turnOffAuthorisationSystem(); + + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + Group admin = groupService.findByName(context, Group.ADMIN); + + // Set up the two different collections: one permissive and one restrictive in its default READ policy. + Collection permissive = CollectionBuilder + .createCollection(context, community) + .build(); + Collection restrictive = CollectionBuilder + .createCollection(context, community) + .build(); + authorizeService.removePoliciesActionFilter(context, restrictive, Constants.DEFAULT_BITSTREAM_READ); + authorizeService.addPolicy(context, restrictive, Constants.DEFAULT_BITSTREAM_READ, admin); + + // Add an item to the permissive collection. + Item item = ItemBuilder + .createItem(context, permissive) + .build(); + + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, InputStream.nullInputStream()) + .build(); + + Bundle bundle = item.getBundles("ORIGINAL").get(0); + + // Verify that the item, bundle and bitstream each have exactly one READ policy, for the anonymous group. + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + // Move the item to the restrictive collection, making sure to inherit default policies. + itemService.move(context, item, permissive, restrictive, true); + + // Verify that the bundle and bitstream's read policies now only allows administrators. 
+ assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(admin), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(admin), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + context.restoreAuthSystemState(); + + } + private void assertMetadataValue(String authorQualifier, String contributorElement, String dcSchema, String value, String authority, int place, MetadataValue metadataValue) { assertThat(metadataValue.getValue(), equalTo(value)); From 5703dcb0338d21462d6f4204c6da8f6846a6a1ad Mon Sep 17 00:00:00 2001 From: Koen Pauwels Date: Tue, 8 Aug 2023 12:56:10 +0200 Subject: [PATCH 371/686] 104878 Adjust ItemServiceTest to expect correct behavior of bundles when item is migrated --- .../org/dspace/content/service/ItemServiceTest.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java index 16d78a8e3e45..d2f4b6d85197 100644 --- a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java @@ -764,8 +764,8 @@ public void testRemoveItemThatHasRequests() throws Exception { public void testMoveItemToCollectionWithMoreRestrictiveItemReadPolicy() throws Exception { /* Verify that, if we move an item from a collection with a permissive default item READ policy * to a collection with a restrictive default item READ policy, - * that the item does not retain the original permissive item READ policy. - * However, its bundles and bitstreams do. + * that the item and its bundles do not retain the original permissive item READ policy. + * However, its bitstreams do. */ context.turnOffAuthorisationSystem(); @@ -820,7 +820,7 @@ public void testMoveItemToCollectionWithMoreRestrictiveItemReadPolicy() throws E .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) ); assertEquals( - List.of(anonymous), + List.of(admin), authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) ); @@ -837,8 +837,8 @@ public void testMoveItemToCollectionWithMoreRestrictiveItemReadPolicy() throws E public void testMoveItemToCollectionWithMoreRestrictiveBitstreamReadPolicy() throws Exception { /* Verify that, if we move an item from a collection with a permissive default bitstream READ policy * to a collection with a restrictive default bitstream READ policy, - * that the item's bundles and bitstreams do not retain the original permissive READ policy. - * However, the item itself does retain the original policy. + * that the item's bitstreams do not retain the original permissive READ policy. + * However, the item itself and its bundles do retain the original policy. 
*/ context.turnOffAuthorisationSystem(); @@ -893,7 +893,7 @@ public void testMoveItemToCollectionWithMoreRestrictiveBitstreamReadPolicy() thr .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) ); assertEquals( - List.of(admin), + List.of(anonymous), authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) ); From 5fbc9885180332f9b16b7a6c64c3821d322cc622 Mon Sep 17 00:00:00 2001 From: Koen Pauwels Date: Tue, 8 Aug 2023 16:22:49 +0200 Subject: [PATCH 372/686] Fix failing IT in BulkAccessControlIT --- .../org/dspace/app/bulkaccesscontrol/BulkAccessControl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index 50e1022dbe37..7bef232f0450 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -464,7 +464,7 @@ private void setItemPolicies(Item item, BulkAccessControlInput accessControl) .forEach(accessCondition -> createResourcePolicy(item, accessCondition, itemAccessConditions.get(accessCondition.getName()))); - itemService.adjustItemPolicies(context, item, item.getOwningCollection()); + itemService.adjustItemPolicies(context, item, item.getOwningCollection(), false); } /** From 0395112b068d33e59d5bfa3c768316dca486ca46 Mon Sep 17 00:00:00 2001 From: "Gantner, Florian Klaus" Date: Fri, 25 Aug 2023 19:05:36 +0200 Subject: [PATCH 373/686] copy rightItem uuid only if there is some value to add avoids exception in nCopied method --- .../RelationshipPlacesIndexingServiceImpl.java | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipPlacesIndexingServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipPlacesIndexingServiceImpl.java index 1ed14b4fbe1f..f29e209d7790 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipPlacesIndexingServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipPlacesIndexingServiceImpl.java @@ -55,7 +55,9 @@ public void updateRelationReferences(final Context context, final Relationship r if (singleDirectionRelationship("right", relationship.getRelationshipType())) { times = relation.getLeftPlace() - relation.getRightPlace(); } - rightItemsIdsToAdd.addAll(Collections.nCopies(times, relation.getRightItem().getID().toString())); + if (times > 0) { + rightItemsIdsToAdd.addAll(Collections.nCopies(times, relation.getRightItem().getID().toString())); + } } if (!rightItemsIdsToAdd.isEmpty()) { @@ -79,7 +81,9 @@ public void updateRelationReferences(final Context context, final Relationship r if (singleDirectionRelationship("left", relationship.getRelationshipType())) { times = relation.getRightPlace() - relation.getLeftPlace(); } - leftItemsIdsToAdd.addAll(Collections.nCopies(times, relation.getLeftItem().getID().toString())); + if (times > 0) { + leftItemsIdsToAdd.addAll(Collections.nCopies(times, relation.getLeftItem().getID().toString())); + } } if (!leftItemsIdsToAdd.isEmpty()) { @@ -102,7 +106,9 @@ private void addRightItemsReferences(final Context context, final Relationship r if (singleDirectionRelationship("right", relationship.getRelationshipType())) { times = leftItemRelation.getLeftPlace() - 
leftItemRelation.getRightPlace(); } - rightItemsToAdd.addAll(Collections.nCopies(times, leftItemRelation.getRightItem().getID().toString())); + if (times > 0) { + rightItemsToAdd.addAll(Collections.nCopies(times, leftItemRelation.getRightItem().getID().toString())); + } } if (!rightItemsToAdd.isEmpty()) { indexingService.updateRelationForItem(leftItem.getID().toString(), @@ -122,7 +128,9 @@ private void addLeftItemsReferences(final Context context, final Relationship re if (singleDirectionRelationship("left", relationship.getRelationshipType())) { times = leftItemRelation.getRightPlace() - leftItemRelation.getLeftPlace(); } - rightItemsToAdd.addAll(Collections.nCopies(times, leftItemRelation.getLeftItem().getID().toString())); + if (times > 0) { + rightItemsToAdd.addAll(Collections.nCopies(times, leftItemRelation.getLeftItem().getID().toString())); + } } if (!rightItemsToAdd.isEmpty()) { indexingService.updateRelationForItem(rightItem.getID().toString(), From 362749b3503421f28ee37434a7564086b0ca877c Mon Sep 17 00:00:00 2001 From: "Gantner, Florian Klaus" Date: Mon, 28 Aug 2023 10:17:03 +0200 Subject: [PATCH 374/686] fix wrong config property breaking tests --- dspace/config/local.cfg.EXAMPLE | 2 +- dspace/config/modules/rest.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace/config/local.cfg.EXAMPLE b/dspace/config/local.cfg.EXAMPLE index 78f5bad26770..e0a1bed1fbb2 100644 --- a/dspace/config/local.cfg.EXAMPLE +++ b/dspace/config/local.cfg.EXAMPLE @@ -243,7 +243,7 @@ db.schema = public # avoid trouble for such browsers (i.e. rest.cors.allowed-origins = ${dspace.ui.url}, https://samltest.id ) #rest.cors.allowed-origins = ${dspace.ui.url} -#rest.cors.bitstream-allowed-origins = ${dspace.ui.url} +#rest.cors.bitstream-allow-origins = ${dspace.ui.url} ################################################# # SPRING BOOT SETTINGS (Used by Server Webapp) # diff --git a/dspace/config/modules/rest.cfg b/dspace/config/modules/rest.cfg index 9d2eb77be2cc..76abcb619528 100644 --- a/dspace/config/modules/rest.cfg +++ b/dspace/config/modules/rest.cfg @@ -134,7 +134,7 @@ rest.regex-clause = text_value ~ ? 
##### Customize the REST origins allowed to retrieve the bitstreams ##### ##### default is set to pattern * - use this configuration to restrict/modify this behavior ##### This configuration doens't support the wildcard -bitstream.cors.allowed-origins = +rest.cors.bitstream-allow-origins = ##### Configure REST Report Filters ##### From f005ecb1827b6bcd24211aa812300c680c22b590 Mon Sep 17 00:00:00 2001 From: Damiano Fiorenza Date: Mon, 28 Aug 2023 12:25:51 +0200 Subject: [PATCH 375/686] [DSC-1198] add check to prevent empty metadata values with no authority from creating related entities --- dspace-api/src/main/java/org/dspace/authority/CrisConsumer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/authority/CrisConsumer.java b/dspace-api/src/main/java/org/dspace/authority/CrisConsumer.java index eec4412c0c98..3321b09b5471 100644 --- a/dspace-api/src/main/java/org/dspace/authority/CrisConsumer.java +++ b/dspace-api/src/main/java/org/dspace/authority/CrisConsumer.java @@ -193,7 +193,7 @@ private boolean isMetadataSkippable(MetadataValue metadata) { return true; } - if (isBlank(authority) && isMetadataWithEmptyAuthoritySkippable(metadata)) { + if (isBlank(authority) && (isBlank(metadata.getValue()) || isMetadataWithEmptyAuthoritySkippable(metadata))) { return true; } From e2a18b15bb29fd830ea75188590122f356684b05 Mon Sep 17 00:00:00 2001 From: Oliver Goldschmidt Date: Mon, 28 Aug 2023 12:39:07 +0200 Subject: [PATCH 376/686] refs #378: Removes map method for SHERPA/RoMEO externalproviders configuration --- .../java/org/dspace/content/authority/SherpaAuthority.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SherpaAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SherpaAuthority.java index 44bd406ce43c..54d8f3325ceb 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SherpaAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SherpaAuthority.java @@ -173,9 +173,4 @@ private boolean isLocalItemChoicesEnabled() { return configurationService.getBooleanProperty("cris." 
+ this.authorityName + ".local-item-choices-enabled"); } - @Override - public Map getExternalSource() { - return Map.of(); - } - } \ No newline at end of file From c67d4b887be21cf38be839e882ee1e0159fa3d2b Mon Sep 17 00:00:00 2001 From: Oliver Goldschmidt Date: Wed, 30 Aug 2023 10:40:58 +0200 Subject: [PATCH 377/686] fixes #380: reset information about modified or added items in subscription context --- .../subscriptions/SubscriptionEmailNotificationServiceImpl.java | 1 + 1 file changed, 1 insertion(+) diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java index 2a30b89af3f5..ab19139e7f9c 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java @@ -117,6 +117,7 @@ public void perform(Context context, DSpaceRunnableHandler handler, String subsc collectionsItems, items); communityItems.clear(); collectionsItems.clear(); + items.clear(); } } else { //in the end of the iteration From 997057f8e4c3b48092e41534dd99535b109b214a Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Wed, 6 Sep 2023 19:48:55 +0200 Subject: [PATCH 378/686] 103837: Fix isContentBitstream() in GoogleAsyncEventListener --- .../main/java/org/dspace/google/GoogleAsyncEventListener.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java index cb9a120fd08f..c1c59acf4a63 100644 --- a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java +++ b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java @@ -182,7 +182,7 @@ private String getDocumentPath(HttpServletRequest request) { private boolean isContentBitstream(UsageEvent usageEvent) { // check if event is a VIEW event and object is a Bitstream if (usageEvent.getAction() == UsageEvent.Action.VIEW - || usageEvent.getObject().getType() == Constants.BITSTREAM) { + && usageEvent.getObject().getType() == Constants.BITSTREAM) { // check if bitstream belongs to a configured bundle List allowedBundles = List.of(configurationService .getArrayProperty("google-analytics.bundles", new String[]{Constants.CONTENT_BUNDLE_NAME})); From 1b769de42a6916d7d4ee61dcfb95341abccbd45e Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Wed, 28 Jun 2023 10:36:30 -0400 Subject: [PATCH 379/686] Rename test suites that are really integration testing. 
(cherry picked from commit f66ca33b0627c1b0789c9c3ce407463f5dc3356e) --- ...sswordValidatorTest.java => RegexPasswordValidatorIT.java} | 2 +- ...ningTest.java => RelationshipServiceImplVersioningIT.java} | 2 +- ...ationshipsTest.java => VersioningWithRelationshipsIT.java} | 2 +- ...elationshipDAOImplTest.java => RelationshipDAOImplIT.java} | 4 ++-- ...hipTypeDAOImplTest.java => RelationshipTypeDAOImplIT.java} | 4 ++-- .../service/{ItemServiceTest.java => ItemServiceIT.java} | 4 ++-- ...iderTest.java => VersionedHandleIdentifierProviderIT.java} | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) rename dspace-api/src/test/java/org/dspace/authorize/{RegexPasswordValidatorTest.java => RegexPasswordValidatorIT.java} (97%) rename dspace-api/src/test/java/org/dspace/content/{RelationshipServiceImplVersioningTest.java => RelationshipServiceImplVersioningIT.java} (99%) rename dspace-api/src/test/java/org/dspace/content/{VersioningWithRelationshipsTest.java => VersioningWithRelationshipsIT.java} (99%) rename dspace-api/src/test/java/org/dspace/content/dao/{RelationshipDAOImplTest.java => RelationshipDAOImplIT.java} (98%) rename dspace-api/src/test/java/org/dspace/content/dao/{RelationshipTypeDAOImplTest.java => RelationshipTypeDAOImplIT.java} (98%) rename dspace-api/src/test/java/org/dspace/content/service/{ItemServiceTest.java => ItemServiceIT.java} (99%) rename dspace-api/src/test/java/org/dspace/identifier/{VersionedHandleIdentifierProviderTest.java => VersionedHandleIdentifierProviderIT.java} (97%) diff --git a/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorTest.java b/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorIT.java similarity index 97% rename from dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorTest.java rename to dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorIT.java index df333fa500c9..7286fb8e8374 100644 --- a/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorTest.java +++ b/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorIT.java @@ -26,7 +26,7 @@ * @author Luca Giamminonni (luca.giamminonni at 4science.it) */ @RunWith(MockitoJUnitRunner.class) -public class RegexPasswordValidatorTest extends AbstractIntegrationTest { +public class RegexPasswordValidatorIT extends AbstractIntegrationTest { @Mock private ConfigurationService configurationService; diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningTest.java b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningIT.java similarity index 99% rename from dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningTest.java rename to dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningIT.java index d42213da2cf8..1b6f23032d57 100644 --- a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningTest.java +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningIT.java @@ -26,7 +26,7 @@ import org.junit.Before; import org.junit.Test; -public class RelationshipServiceImplVersioningTest extends AbstractIntegrationTestWithDatabase { +public class RelationshipServiceImplVersioningIT extends AbstractIntegrationTestWithDatabase { private RelationshipService relationshipService; private RelationshipDAO relationshipDAO; diff --git a/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsTest.java 
b/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsIT.java similarity index 99% rename from dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsTest.java rename to dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsIT.java index 528568c4e5fb..44653300e0de 100644 --- a/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsTest.java +++ b/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsIT.java @@ -70,7 +70,7 @@ import org.junit.Test; import org.springframework.beans.factory.config.AutowireCapableBeanFactory; -public class VersioningWithRelationshipsTest extends AbstractIntegrationTestWithDatabase { +public class VersioningWithRelationshipsIT extends AbstractIntegrationTestWithDatabase { private final RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); diff --git a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplIT.java similarity index 98% rename from dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java rename to dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplIT.java index b6f5da6be065..2d08223b2e3e 100644 --- a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java +++ b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplIT.java @@ -39,9 +39,9 @@ * Created by: Andrew Wood * Date: 20 Sep 2019 */ -public class RelationshipDAOImplTest extends AbstractIntegrationTest { +public class RelationshipDAOImplIT extends AbstractIntegrationTest { - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipDAOImplTest.class); + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipDAOImplIT.class); private Relationship relationship; diff --git a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplTest.java b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplIT.java similarity index 98% rename from dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplTest.java rename to dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplIT.java index 3fff6fec4762..ff7d03b49f6d 100644 --- a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplTest.java +++ b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplIT.java @@ -35,9 +35,9 @@ import org.junit.Before; import org.junit.Test; -public class RelationshipTypeDAOImplTest extends AbstractIntegrationTest { +public class RelationshipTypeDAOImplIT extends AbstractIntegrationTest { - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipTypeDAOImplTest.class); + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipTypeDAOImplIT.class); private Relationship relationship; diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceIT.java similarity index 99% rename from dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java rename to dspace-api/src/test/java/org/dspace/content/service/ItemServiceIT.java index 50b4d3f3b48e..e40577ef36ea 100644 --- a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java +++ 
b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceIT.java @@ -54,8 +54,8 @@ import org.junit.Before; import org.junit.Test; -public class ItemServiceTest extends AbstractIntegrationTestWithDatabase { - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemServiceTest.class); +public class ItemServiceIT extends AbstractIntegrationTestWithDatabase { + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemServiceIT.class); protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance() diff --git a/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java similarity index 97% rename from dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java rename to dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java index 1bc6bf140832..7e549f6cae33 100644 --- a/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java +++ b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java @@ -27,7 +27,7 @@ import org.junit.Before; import org.junit.Test; -public class VersionedHandleIdentifierProviderTest extends AbstractIntegrationTestWithDatabase { +public class VersionedHandleIdentifierProviderIT extends AbstractIntegrationTestWithDatabase { private ServiceManager serviceManager; private IdentifierServiceImpl identifierService; From 732314018da5f6f7b5e7d55f9aab7f4a4c53218c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 22:40:56 +0000 Subject: [PATCH 380/686] Bump org.eclipse.jetty:jetty-xml Bumps [org.eclipse.jetty:jetty-xml](https://github.com/eclipse/jetty.project) from 9.4.51.v20230217 to 9.4.52.v20230823. - [Release notes](https://github.com/eclipse/jetty.project/releases) - [Commits](https://github.com/eclipse/jetty.project/compare/jetty-9.4.51.v20230217...jetty-9.4.52.v20230823) --- updated-dependencies: - dependency-name: org.eclipse.jetty:jetty-xml dependency-type: direct:production ... Signed-off-by: dependabot[bot] (cherry picked from commit 2bcc0b38a9436b0abc4c54e419f0fa6ae194269c) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index d2b3b3428856..19bf52de855e 100644 --- a/pom.xml +++ b/pom.xml @@ -37,7 +37,7 @@ 2.3.1 1.1.0 - 9.4.51.v20230217 + 9.4.52.v20230823 2.20.0 2.0.28 1.19.0 From ff393fe72d9b218734e4efb0ac8266f222d8da62 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 8 Sep 2023 14:01:25 -0500 Subject: [PATCH 381/686] Add note that rebooting Tomcat required --- dspace/config/dspace.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 61d6fb589a60..fda7a6e57124 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1160,7 +1160,8 @@ webui.browse.index.4 = subject:metadata:dc.subject.*:text # By default, browse hierarchical indexes are created based on the used controlled # vocabularies in the submission forms. These could be disabled adding the name of -# the vocabularies to exclude in this comma-separated property: +# the vocabularies to exclude in this comma-separated property. 
+# (Requires reboot of servlet container, e.g. Tomcat, to reload) # webui.browse.vocabularies.disabled = srsc # Enable/Disable tag cloud in browsing. From ba197ef611a8f8e768db04afa5ce0e080010d558 Mon Sep 17 00:00:00 2001 From: Hrafn Malmquist Date: Sat, 9 Sep 2023 00:53:11 +0100 Subject: [PATCH 382/686] Add websvc.opensearch.autolink and websvc.opensearch.shortname to exposed REST configuration properties (cherry picked from commit 80b35c9650ec2f40bea3b497b65ce77d0c97bfcf) --- dspace/config/modules/rest.cfg | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace/config/modules/rest.cfg b/dspace/config/modules/rest.cfg index 657e02b58de7..b08f8d514536 100644 --- a/dspace/config/modules/rest.cfg +++ b/dspace/config/modules/rest.cfg @@ -36,7 +36,6 @@ rest.patch.operations.limit = 1000 # (Requires reboot of servlet container, e.g. Tomcat, to reload) rest.properties.exposed = plugin.named.org.dspace.curate.CurationTask rest.properties.exposed = google.analytics.key -rest.properties.exposed = websvc.opensearch.enable rest.properties.exposed = versioning.item.history.include.submitter rest.properties.exposed = researcher-profile.entity-type rest.properties.exposed = orcid.application-client-id @@ -44,7 +43,10 @@ rest.properties.exposed = orcid.authorize-url rest.properties.exposed = orcid.scope rest.properties.exposed = orcid.disconnection.allowed-users rest.properties.exposed = registration.verification.enabled +rest.properties.exposed = websvc.opensearch.enable rest.properties.exposed = websvc.opensearch.svccontext +rest.properties.exposed = websvc.opensearch.shortname +rest.properties.exposed = websvc.opensearch.autolink rest.properties.exposed = submit.type-bind.field rest.properties.exposed = google.recaptcha.key.site rest.properties.exposed = google.recaptcha.version From f036467aa3dcefb884181e46111da535703ce316 Mon Sep 17 00:00:00 2001 From: Mark Cooper Date: Fri, 5 May 2023 14:04:08 -0700 Subject: [PATCH 383/686] Add a "container friendly" log4j2 cfg and output compose dspace log to console (cherry picked from commit 9eefd56cd7013730ff43969b321d6cbee2a3ae64) --- docker-compose.yml | 1 + dspace/config/log4j2-container.xml | 65 ++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 dspace/config/log4j2-container.xml diff --git a/docker-compose.yml b/docker-compose.yml index 36ba6af2c981..6c1615040722 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -28,6 +28,7 @@ services: # proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests # from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above. proxies__P__trusted__P__ipranges: '172.23.0' + LOGGING_CONFIG: /dspace/config/log4j2-container.xml image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}" build: context: . 
diff --git a/dspace/config/log4j2-container.xml b/dspace/config/log4j2-container.xml new file mode 100644 index 000000000000..9fd358c72a1f --- /dev/null +++ b/dspace/config/log4j2-container.xml @@ -0,0 +1,65 @@ + + + + + + INFO + INFO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From d965057854579069aa118df7801fdaf0af9a04d0 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 13 Sep 2023 11:47:55 -0500 Subject: [PATCH 384/686] Fix ClassCastException (Collection cannot be cast to Item) in Handle identifier classes (cherry picked from commit 1271374d37a3f7d9cec270e54ec106895aa934bc) --- .../identifier/HandleIdentifierProvider.java | 3 +- ...dentifierProviderWithCanonicalHandles.java | 65 ++++++++++--------- 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java index 1ded40c8f8a4..59a1e13a2166 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java @@ -88,8 +88,7 @@ public void register(Context context, DSpaceObject dso, String identifier) { try { handleService.createHandle(context, dso, identifier); if (dso instanceof Item || dso instanceof Collection || dso instanceof Community) { - Item item = (Item) dso; - populateHandleMetadata(context, item, identifier); + populateHandleMetadata(context, dso, identifier); } } catch (IOException | IllegalStateException | SQLException | AuthorizeException e) { log.error(LogHelper.getHeader(context, diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 78ad6b7b79bb..bfa331319911 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -180,45 +180,46 @@ public String register(Context context, DSpaceObject dso) { @Override public void register(Context context, DSpaceObject dso, String identifier) { try { - - Item item = (Item) dso; - - // if for this identifier is already present a record in the Handle table and the corresponding item - // has an history someone is trying to restore the latest version for the item. 
When - // trying to restore the latest version the identifier in input doesn't have the for 1234/123.latestVersion - // it is the canonical 1234/123 - VersionHistory itemHistory = getHistory(context, identifier); - if (!identifier.matches(".*/.*\\.\\d+") && itemHistory != null) { - - int newVersionNumber = versionHistoryService.getLatestVersion(context, itemHistory) - .getVersionNumber() + 1; - String canonical = identifier; - identifier = identifier.concat(".").concat("" + newVersionNumber); - restoreItAsVersion(context, dso, identifier, item, canonical, itemHistory); - } else if (identifier.matches(".*/.*\\.\\d+")) { - // if identifier == 1234.5/100.4 reinstate the version 4 in the version table if absent - - // if it is a version of an item is needed to put back the record - // in the versionitem table - String canonical = getCanonical(identifier); - DSpaceObject canonicalItem = this.resolve(context, canonical); - if (canonicalItem == null) { - restoreItAsCanonical(context, dso, identifier, item, canonical); - } else { - VersionHistory history = versionHistoryService.findByItem(context, (Item) canonicalItem); - if (history == null) { + if (dso instanceof Item) { + Item item = (Item) dso; + // if for this identifier is already present a record in the Handle table and the corresponding item + // has an history someone is trying to restore the latest version for the item. When + // trying to restore the latest version the identifier in input doesn't have the for 1234/123.latestVersion + // it is the canonical 1234/123 + VersionHistory itemHistory = getHistory(context, identifier); + if (!identifier.matches(".*/.*\\.\\d+") && itemHistory != null) { + + int newVersionNumber = versionHistoryService.getLatestVersion(context, itemHistory) + .getVersionNumber() + 1; + String canonical = identifier; + identifier = identifier.concat(".").concat("" + newVersionNumber); + restoreItAsVersion(context, dso, identifier, item, canonical, itemHistory); + } else if (identifier.matches(".*/.*\\.\\d+")) { + // if identifier == 1234.5/100.4 reinstate the version 4 in the version table if absent + + // if it is a version of an item is needed to put back the record + // in the versionitem table + String canonical = getCanonical(identifier); + DSpaceObject canonicalItem = this.resolve(context, canonical); + if (canonicalItem == null) { restoreItAsCanonical(context, dso, identifier, item, canonical); } else { - restoreItAsVersion(context, dso, identifier, item, canonical, history); + VersionHistory history = versionHistoryService.findByItem(context, (Item) canonicalItem); + if (history == null) { + restoreItAsCanonical(context, dso, identifier, item, canonical); + } else { + restoreItAsVersion(context, dso, identifier, item, canonical, history); + } } + } else { + // A regular handle to create for an Item + createNewIdentifier(context, dso, identifier); + modifyHandleMetadata(context, item, getCanonical(identifier)); } } else { - //A regular handle + // Handle being registered for a different type of object (e.g. Collection or Community) createNewIdentifier(context, dso, identifier); - if (dso instanceof Item) { - modifyHandleMetadata(context, item, getCanonical(identifier)); - } } } catch (IOException | SQLException | AuthorizeException e) { log.error(LogHelper.getHeader(context, From 42e2e4b0c88241e1674f54d9dc4b5668772e0c9b Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 13 Sep 2023 12:28:55 -0500 Subject: [PATCH 385/686] Fix checkstyle. Correct grammar of comment while doing so. 
(cherry picked from commit ffa2683c632cfab59b0432c203434ac15a6eb85f) --- ...ionedHandleIdentifierProviderWithCanonicalHandles.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index bfa331319911..e6a092c47284 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -182,10 +182,10 @@ public void register(Context context, DSpaceObject dso, String identifier) { try { if (dso instanceof Item) { Item item = (Item) dso; - // if for this identifier is already present a record in the Handle table and the corresponding item - // has an history someone is trying to restore the latest version for the item. When - // trying to restore the latest version the identifier in input doesn't have the for 1234/123.latestVersion - // it is the canonical 1234/123 + // if this identifier is already present in the Handle table and the corresponding item + // has a history, then someone is trying to restore the latest version for the item. When + // trying to restore the latest version, the identifier in input doesn't have the + // 1234/123.latestVersion. Instead, it is the canonical 1234/123 VersionHistory itemHistory = getHistory(context, identifier); if (!identifier.matches(".*/.*\\.\\d+") && itemHistory != null) { From 7b2b8db51211163eea740c9d2d7b1d3bfa70a258 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Thu, 24 Aug 2023 13:03:26 -0400 Subject: [PATCH 386/686] Avoid double slashes in sitemap paths. 
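As an illustrative aside (not part of this patch; the URL and UUID below are made-up examples), the double slash this change prevents appears when dspace.ui.url is configured with a trailing slash:

    // Minimal, self-contained sketch of the sitemap URL concatenation before and after the fix.
    public class SitemapUrlSketch {
        public static void main(String[] args) {
            String uiURLStem = "https://repo.example.org/";       // hypothetical dspace.ui.url value with a trailing slash
            String before = uiURLStem + "/items/" + "some-uuid";  // old style: stem + "/items/" produces a double slash
            if (!uiURLStem.endsWith("/")) {
                uiURLStem = uiURLStem + '/';
            }
            String after = uiURLStem + "items/" + "some-uuid";    // patched style: normalized stem + "items/"
            System.out.println(before);  // https://repo.example.org//items/some-uuid
            System.out.println(after);   // https://repo.example.org/items/some-uuid
        }
    }
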
(cherry picked from commit eae4463eaa5916bd9b20f4e4132398aceeba1f02) --- .../org/dspace/app/sitemap/GenerateSitemaps.java | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java index d65447d311ee..400b5ecb87cb 100644 --- a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java +++ b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java @@ -189,7 +189,10 @@ public static void deleteSitemaps() throws IOException { */ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException { String uiURLStem = configurationService.getProperty("dspace.ui.url"); - String sitemapStem = uiURLStem + "/sitemap"; + if (!uiURLStem.endsWith("/")) { + uiURLStem = uiURLStem + '/'; + } + String sitemapStem = uiURLStem + "sitemap"; File outputDir = new File(configurationService.getProperty("sitemap.dir")); if (!outputDir.exists() && !outputDir.mkdir()) { @@ -212,7 +215,7 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) List comms = communityService.findAll(c); for (Community comm : comms) { - String url = uiURLStem + "/communities/" + comm.getID(); + String url = uiURLStem + "communities/" + comm.getID(); if (makeHTMLMap) { html.addURL(url, null); @@ -227,7 +230,7 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) List colls = collectionService.findAll(c); for (Collection coll : colls) { - String url = uiURLStem + "/collections/" + coll.getID(); + String url = uiURLStem + "collections/" + coll.getID(); if (makeHTMLMap) { html.addURL(url, null); @@ -259,11 +262,11 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) && StringUtils.isNotBlank(discoverResult.getSearchDocument( discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0)) ) { - url = uiURLStem + "/entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument( + url = uiURLStem + "entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument( discoverResult.getIndexableObjects().get(0)) .get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID(); } else { - url = uiURLStem + "/items/" + i.getID(); + url = uiURLStem + "items/" + i.getID(); } Date lastMod = i.getLastModified(); From e757b9d9f4d70e8e395d32f32fbe846b49816074 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 13 Sep 2023 10:15:07 -0500 Subject: [PATCH 387/686] Remove 'cross join' from count query. 
Updates "countHandlesByPrefix" to use a query similar to existing "findByPrefix" (cherry picked from commit 14223bd712ce91cf97096f2201924baea8456814) --- .../main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java b/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java index 3bd702bf809c..71bb798ae387 100644 --- a/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java @@ -90,13 +90,11 @@ public List findByPrefix(Context context, String prefix) throws SQLExcep @Override public long countHandlesByPrefix(Context context, String prefix) throws SQLException { - - CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Handle.class); Root handleRoot = criteriaQuery.from(Handle.class); - criteriaQuery.select(criteriaBuilder.count(criteriaQuery.from(Handle.class))); + criteriaQuery.select(handleRoot); criteriaQuery.where(criteriaBuilder.like(handleRoot.get(Handle_.handle), prefix + "%")); return countLong(context, criteriaQuery, criteriaBuilder, handleRoot); } From 8271c4d07b772f551ee286e50643a2839bbfa480 Mon Sep 17 00:00:00 2001 From: nwoodward Date: Mon, 18 Sep 2023 15:34:23 -0500 Subject: [PATCH 388/686] fix logical bug when checking if field is controlled authority --- .../src/main/java/org/dspace/app/bulkedit/MetadataImport.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java index 4161bbb4d817..9044c723ff53 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java @@ -1363,7 +1363,7 @@ private int displayChanges(List changes, boolean changed) { * is the field is defined as authority controlled */ private static boolean isAuthorityControlledField(String md) { - String mdf = StringUtils.substringAfter(md, ":"); + String mdf = md.contains(":") ? 
StringUtils.substringAfter(md, ":") : md; mdf = StringUtils.substringBefore(mdf, "["); return authorityControlled.contains(mdf); } From 92a1e4c7c0543bfd26d610aa6d828858794ce116 Mon Sep 17 00:00:00 2001 From: eskander Date: Tue, 19 Sep 2023 14:28:48 +0300 Subject: [PATCH 389/686] [DSC-1247] Allow parameter for executing user to CLI processes (bulk-import) --- .../org/dspace/app/bulkedit/BulkImport.java | 9 +- .../dspace/app/bulkedit/BulkImportCli.java | 27 +++ .../BulkImportCliScriptConfiguration.java | 10 ++ .../BulkImportScriptConfiguration.java | 6 +- .../org/dspace/app/bulkedit/BulkImportIT.java | 163 +++++++++++++----- 5 files changed, 160 insertions(+), 55 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImport.java index 491039cff835..235e7806487a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImport.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImport.java @@ -258,7 +258,7 @@ public void setup() throws ParseException { collectionId = commandLine.getOptionValue('c'); filename = commandLine.getOptionValue('f'); - if (commandLine.hasOption('e')) { + if (commandLine.hasOption("er")) { abortOnError = true; } } @@ -266,11 +266,9 @@ public void setup() throws ParseException { @Override public void internalRun() throws Exception { context = new Context(Context.Mode.BATCH_EDIT); - assignCurrentUserInContext(); + assignCurrentUserInContext(context); assignSpecialGroupsInContext(); - context.turnOffAuthorisationSystem(); - InputStream inputStream = handler.getFileStream(context, filename) .orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be " + "found for filename: " + filename)); @@ -285,6 +283,7 @@ public void internalRun() throws Exception { } try { + context.turnOffAuthorisationSystem(); performImport(inputStream); context.complete(); context.restoreAuthSystemState(); @@ -1601,7 +1600,7 @@ private void rollback() { } } - private void assignCurrentUserInContext() throws SQLException { + protected void assignCurrentUserInContext(Context context) throws SQLException, ParseException { UUID uuid = getEpersonIdentifier(); if (uuid != null) { EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid); diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCli.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCli.java index 36da59c7a252..c1399c61413b 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCli.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCli.java @@ -7,6 +7,13 @@ */ package org.dspace.app.bulkedit; +import java.sql.SQLException; + +import org.apache.commons.cli.ParseException; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; + /** * Extension of {@link BulkImport} for CLI. 
* @@ -15,4 +22,24 @@ */ public class BulkImportCli extends BulkImport { + @Override + protected void assignCurrentUserInContext(Context context) throws ParseException { + if (commandLine.hasOption('e')) { + String ePersonEmail = commandLine.getOptionValue('e'); + try { + EPerson ePerson = + EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, ePersonEmail); + if (ePerson == null) { + super.handler.logError("EPerson not found: " + ePersonEmail); + throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail); + } + context.setCurrentUser(ePerson); + } catch (SQLException e) { + throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail); + } + } else { + throw new ParseException("Required parameter -e missing!"); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCliScriptConfiguration.java index f79c03e041e2..5e34f6a58464 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportCliScriptConfiguration.java @@ -7,6 +7,8 @@ */ package org.dspace.app.bulkedit; +import org.apache.commons.cli.Options; + /** * Extension of {@link BulkImportScriptConfiguration} for CLI. * @@ -15,5 +17,13 @@ */ public class BulkImportCliScriptConfiguration extends BulkImportScriptConfiguration { + @Override + public Options getOptions() { + Options options = super.getOptions(); + options.addOption("e", "email", true, "email address of user"); + options.getOption("e").setRequired(true); + super.options = options; + return options; + } } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportScriptConfiguration.java index e2fd7bacd0e1..3530687bf36f 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/BulkImportScriptConfiguration.java @@ -52,9 +52,9 @@ public Options getOptions() { options.getOption("f").setType(InputStream.class); options.getOption("f").setRequired(true); - options.addOption("e", "concludeOnError", false, "conclude the import at the first error"); - options.getOption("e").setType(boolean.class); - options.getOption("e").setRequired(false); + options.addOption("er", "concludeOnError", false, "conclude the import at the first error"); + options.getOption("er").setType(boolean.class); + options.getOption("er").setRequired(false); super.options = options; } diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/BulkImportIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/BulkImportIT.java index e03c414a034c..e1d04f314c68 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkedit/BulkImportIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/BulkImportIT.java @@ -133,7 +133,8 @@ public void beforeTests() throws SQLException, AuthorizeException { public void testEmptyImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("empty.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new 
TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -149,7 +150,8 @@ public void testEmptyImport() throws InstantiationException, IllegalAccessExcept public void testEmptyHeadersImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("empty-headers.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -165,7 +167,8 @@ public void testEmptyHeadersImport() throws InstantiationException, IllegalAcces public void testOneHeaderEmptyImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("one-header-empty.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -182,7 +185,8 @@ public void testOneHeaderEmptyImport() throws InstantiationException, IllegalAcc public void testWithoutHeadersImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("without-headers.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -198,7 +202,8 @@ public void testWithoutHeadersImport() throws InstantiationException, IllegalAcc public void testInvalidHeadersImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("invalid-headers.xls"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -216,7 +221,8 @@ public void testInvalidHeadersImport() throws InstantiationException, IllegalAcc public void testInvalidSheetNameImport() throws InstantiationException, IllegalAccessException { String fileLocation = getXlsFilePath("invalid-sheet-name.xlsx"); - String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -241,7 +247,8 @@ public void testMetadataGroupRowWithManyValuesImport() throws Exception { context.restoreAuthSystemState(); String fileLocation = 
getXlsFilePath("metadata-group-row-with-many-values.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -275,7 +282,8 @@ public void testHeadersDuplicatedImport() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("headers-duplicated.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -300,7 +308,8 @@ public void testCreatePatent() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -351,7 +360,8 @@ public void testUpdatePatent() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -388,7 +398,8 @@ public void testCreatePublicationWithAuthority() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-publication-with-authority.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -439,7 +450,8 @@ public void testManyPublicationImport() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("many-publications.xls"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -515,7 +527,8 @@ public void testManyPublicationImportWithErrorAndNotAbortOnError() throws Except context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("many-publications.xls"); - String[] args = new String[] 
{ "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -575,7 +588,8 @@ public void testManyPublicationImportWithErrorAndAbortOnError() throws Exception context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("many-publications.xls"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -630,7 +644,8 @@ public void testCreatePublicationWithOneInvalidAuthorityAndNoAbortOnError() thro context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-publication-with-one-invalid-authority.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -675,7 +690,8 @@ public void testCreatePublicationWithOneInvalidAuthorityAndAbortOnError() throws context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-publication-with-one-invalid-authority.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -714,7 +730,8 @@ public void testCreatePublicationWithWillBeGeneratedAuthority() throws Exception String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-generated-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -760,7 +777,8 @@ public void testCreatePublicationWithWillBeGeneratedAuthorityAndNoRelatedItemFou String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-generated-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), 
handler, kernelImpl, admin); @@ -810,7 +828,8 @@ public void testCreatePublicationWithWillBeReferencedAuthority() throws Exceptio String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-referenced-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -856,7 +875,8 @@ public void testCreatePublicationWithWillBeReferencedAuthorityAndNoRelatedItemFo String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-referenced-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -898,7 +918,8 @@ public void testCreatePublicationInWorkspace() throws Exception { String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-workspace-publication.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -942,7 +963,8 @@ public void testCreateArchivedPublication() throws Exception { String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-archived-publication.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e", eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -995,7 +1017,8 @@ public void testUpdateWorkflowPatentWithValidWorkspaceItem() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-workflow-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1045,7 +1068,8 @@ public void testUpdateWorkflowPatentWithInvalidWorkspaceItem() throws Exception context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-workflow-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", 
patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1097,7 +1121,8 @@ public void testUpdateWorkflowPatentWithoutWorkspaceItem() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-workflow-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1146,7 +1171,8 @@ public void testUpdateArchivePatentWithWorkspaceItem() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-archive-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1195,7 +1221,8 @@ public void testUpdateArchivePatentWithWorkflowItem() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-archive-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1244,7 +1271,8 @@ public void testUpdateArchivePatentWithAlreadyArchivedItem() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-archive-patent.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1295,7 +1323,8 @@ public void testAutomaticReferenceResolution() throws Exception { String publicationCollectionId = publications.getID().toString(); String fileLocation = getXlsFilePath("create-publication-with-will-be-referenced-authority.xls"); - String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, "-e" }; + String[] args = new String[] { "bulk-import", "-c", publicationCollectionId, "-f", fileLocation, + "-e" , eperson.getEmail(), "-er"}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -1316,7 +1345,8 @@ public void testAutomaticReferenceResolution() throws Exception { String personsCollectionId = persons.getID().toString(); fileLocation = getXlsFilePath("create-person.xls"); - args = new String[] { "bulk-import", "-c", personsCollectionId, "-f", fileLocation, "-e" }; + args = new 
String[] { "bulk-import", "-c", personsCollectionId, "-f", fileLocation, + "-e" , eperson.getEmail(), "-er"}; handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); @@ -1350,7 +1380,8 @@ public void testUploadSingleBitstream() throws Exception { String fileLocation = getXlsFilePath("add-bitstream-to-item.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1404,7 +1435,8 @@ public void testUploadMultipleBitstreams() throws Exception { String fileLocation = getXlsFilePath("add-multiple-bitstreams-to-items.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1458,7 +1490,8 @@ public void testUploadMultipleBitstreamWithPathTraversal() throws Exception { String fileLocation = getXlsFilePath("add-multiple-bitstreams-with-path-traversal-to-items.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1509,7 +1542,8 @@ public void testUploadSingleBitstreamUpdate() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("add-bitstream-to-item-update.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1555,7 +1589,8 @@ public void testUploadMultipleBitstreamsUpdateMultiple() throws Exception { String fileName = "add-bitstream-to-multiple-items-update.xls"; String fileLocation = getXlsFilePath(fileName); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1602,7 +1637,8 @@ public void testUploadSingleBitstreamUpdateWithExistingBundle() throws Exception String fileName = "add-bitstream-to-item-bundle.xls"; String fileLocation = getXlsFilePath(fileName); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), 
"-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1639,7 +1675,8 @@ public void testCreatePublicationInWorkspaceItemsAndItemHasLicense() throws Exce context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("items-with-bitstreams.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1702,7 +1739,8 @@ public void testCreatePublicationInWorkspaceItemsWithBitstreams() throws Excepti String fileName = "items-with-bitstreams.xlsx"; String fileLocation = getXlsFilePath(fileName); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1786,7 +1824,8 @@ public void testUpdateAndDeleteBitstreamsOfItems() throws Exception { String fileName = "update-delete-bitstreams-of-items.xls"; String fileLocation = getXlsFilePath(fileName); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1855,7 +1894,8 @@ public void testBitstreamUpdateAndDeleteWithWrongPosition() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-delete-bitstreams-of-items.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1918,7 +1958,8 @@ public void testBitstreamUpdateWithAdditionalConditionSetToFalse() throws Except context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-bitstream-policies-without-additional-ac.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -1981,7 +2022,8 @@ public void testUpdateItems() throws Exception { // start test String fileLocation = getXlsFilePath("update-items.xls"); - String[] args = new String[] { "bulk-import", "-c", publication.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", 
"-c", publication.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); assertThat(handler.getErrorMessages(), empty()); @@ -2031,7 +2073,8 @@ public void testCreatePublicationWithSecurityLevel() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("create-publication-with-security-level.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2081,7 +2124,8 @@ public void testUpdatePublicationWithSecurityLevel() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("update-publication-with-security-level.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2121,7 +2165,8 @@ public void testWorkbookWithoutActionColumn() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("without-action-column.xls"); - String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2182,7 +2227,8 @@ public void testWorkbookWithDiscoverableColumn() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("publications_with_discoverable_column.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2237,7 +2283,8 @@ public void testWorkbookWithInvalidOptionalColumnPosition() throws Exception { context.restoreAuthSystemState(); String fileLocation = getXlsFilePath("invalid-optional-column-position.xlsx"); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); @@ -2246,6 +2293,28 @@ public void testWorkbookWithInvalidOptionalColumnPosition() throws Exception { + "must be placed before the metadata fields")); } + @Test + public void testCreatePatentByNotCollectionAdmin() throws 
Exception { + context.turnOffAuthorisationSystem(); + Collection patents = createCollection(context, community) + .withSubmissionDefinition("patent") + .withAdminGroup(admin) + .build(); + context.commit(); + context.restoreAuthSystemState(); + + String fileLocation = getXlsFilePath("create-patent.xls"); + String[] args = new String[] { "bulk-import", "-c", patents.getID().toString(), "-f", fileLocation, + "-e", eperson.getEmail()}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); + + List errorMessages = handler.getErrorMessages(); + assertThat("Expected 1 error message", errorMessages, hasSize(1)); + assertThat(errorMessages.get(0), containsString("The user is not an admin of the given collection")); + } + private WorkspaceItem findWorkspaceItem(Item item) throws SQLException { return workspaceItemService.findByItem(context, item); } From 2667b8d187c108a37ddcacfed1bf4d3dbd6f47cf Mon Sep 17 00:00:00 2001 From: nwoodward Date: Tue, 19 Sep 2023 15:43:29 -0500 Subject: [PATCH 390/686] remove optimize option from oai import --- .../src/main/java/org/dspace/xoai/app/XOAI.java | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java index e27a3ee947cb..4f842b8e944c 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java @@ -85,7 +85,6 @@ public class XOAI { // needed because the solr query only returns 10 rows by default private final Context context; - private boolean optimize; private final boolean verbose; private boolean clean; @@ -122,9 +121,8 @@ private List getFileFormats(Item item) { return formats; } - public XOAI(Context context, boolean optimize, boolean clean, boolean verbose) { + public XOAI(Context context, boolean clean, boolean verbose) { this.context = context; - this.optimize = optimize; this.clean = clean; this.verbose = verbose; @@ -173,12 +171,6 @@ public int index() throws DSpaceSolrIndexerException { } solrServerResolver.getServer().commit(); - if (optimize) { - println("Optimizing Index"); - solrServerResolver.getServer().optimize(); - println("Index optimized"); - } - // Set last compilation date xoaiLastCompilationCacheService.put(new Date()); return result; @@ -586,7 +578,6 @@ public static void main(String[] argv) throws IOException, ConfigurationExceptio CommandLineParser parser = new DefaultParser(); Options options = new Options(); options.addOption("c", "clear", false, "Clear index before indexing"); - options.addOption("o", "optimize", false, "Optimize index at the end"); options.addOption("v", "verbose", false, "Verbose output"); options.addOption("h", "help", false, "Shows some help"); options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE"); @@ -620,7 +611,7 @@ public static void main(String[] argv) throws IOException, ConfigurationExceptio if (COMMAND_IMPORT.equals(command)) { ctx = new Context(Context.Mode.READ_ONLY); - XOAI indexer = new XOAI(ctx, line.hasOption('o'), line.hasOption('c'), line.hasOption('v')); + XOAI indexer = new XOAI(ctx, line.hasOption('c'), line.hasOption('v')); applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer); @@ -706,7 +697,6 @@ private static void usage() { System.out.println(" " + COMMAND_IMPORT + " - To import DSpace items into OAI index and cache system"); 
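// With the -o option removed, the import no longer calls optimize() on the Solr server.
// Recent Solr versions merge index segments on their own, so forcing an optimize after
// every import is generally discouraged. A typical import invocation after this change,
// assuming the standard DSpace launcher script:
//   [dspace]/bin/dspace oai import -c -v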
System.out.println(" " + COMMAND_CLEAN_CACHE + " - Cleans the OAI cached responses"); System.out.println("> Parameters:"); - System.out.println(" -o Optimize index after indexing (" + COMMAND_IMPORT + " only)"); System.out.println(" -c Clear index (" + COMMAND_IMPORT + " only)"); System.out.println(" -v Verbose output"); System.out.println(" -h Shows this text"); From 8027d3b3055e6b52dc28876a0d90c1a60d45a692 Mon Sep 17 00:00:00 2001 From: nwoodward Date: Tue, 19 Sep 2023 16:03:22 -0500 Subject: [PATCH 391/686] added authorization check for license bitstream in OAI import --- .../java/org/dspace/xoai/util/ItemUtils.java | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 35bef8c8d77f..b32983581321 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -21,6 +21,8 @@ import org.dspace.app.util.factory.UtilServiceFactory; import org.dspace.app.util.service.MetadataExposureService; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Item; @@ -59,6 +61,10 @@ public class ItemUtils { private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + private static final AuthorizeService authorizeService + = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + /** * Default constructor */ @@ -163,13 +169,17 @@ private static Element createLicenseElement(Context context, Item item) List licBits = licBundle.getBitstreams(); if (!licBits.isEmpty()) { Bitstream licBit = licBits.get(0); - InputStream in; - - in = bitstreamService.retrieve(context, licBit); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - Utils.bufferedCopy(in, out); - license.getField().add(createValue("bin", Base64Utils.encode(out.toString()))); - + if (authorizeService.authorizeActionBoolean(context, licBit, Constants.READ)) { + InputStream in; + + in = bitstreamService.retrieve(context, licBit); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + Utils.bufferedCopy(in, out); + license.getField().add(createValue("bin", Base64Utils.encode(out.toString()))); + } else { + log.info("Missing READ rights for license bitstream. Did not include license bitstream for item: " + + item.getID() + "."); + } } } return license; From 052cc10e7749f104caddea161b82cbd004be360c Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 22 Sep 2023 17:50:29 +0300 Subject: [PATCH 392/686] [CST-11738] TUHH: ORCID lookup with more data --- dspace/config/modules/discovery.cfg | 1 + .../spring/rest/cris-authority-metadatagenerator.xml | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/dspace/config/modules/discovery.cfg b/dspace/config/modules/discovery.cfg index ddf01769952c..bdaa7b6042c5 100644 --- a/dspace/config/modules/discovery.cfg +++ b/dspace/config/modules/discovery.cfg @@ -28,6 +28,7 @@ discovery.index.projection=dc.title discovery.index.projection=dc.contributor.* discovery.index.projection=dc.date.issued discovery.index.projection=person.affiliation.name +discovery.index.projection=person.identifier.orcid # Allow auto-reindexing. 
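# Note on the projection added above: discovery.index.projection lists metadata values that
# are stored alongside each indexed object in the Solr search core, so consumers of search
# results (for example an authority lookup that wants to show a person's ORCID next to the
# name, the use case behind this change) can read them without an extra database round trip.
# The general pattern, following the existing entries:
#   discovery.index.projection=<schema>.<element>[.<qualifier>|.*]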
# If any database migrations are applied to your database (via Flyway), then a diff --git a/dspace/config/spring/rest/cris-authority-metadatagenerator.xml b/dspace/config/spring/rest/cris-authority-metadatagenerator.xml index 246e41093a36..e38b9c7beda0 100644 --- a/dspace/config/spring/rest/cris-authority-metadatagenerator.xml +++ b/dspace/config/spring/rest/cris-authority-metadatagenerator.xml @@ -30,6 +30,17 @@
    + + + + + + + + + + + From c711d2d9448b970b283e71d2fa1210c51194d7f9 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 28 Jun 2023 19:10:37 +0100 Subject: [PATCH 393/686] OAI: add support to extract embargo from bitstreams and expose it in OAI metadata (cherry picked from commit db81d758a947a9bdbb63fea9e872bc9b52a377ff) --- .../java/org/dspace/xoai/util/ItemUtils.java | 32 +++++++++++++++++++ .../oai/metadataFormats/uketd_dc.xsl | 5 +++ 2 files changed, 37 insertions(+) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 35bef8c8d77f..1733440afd95 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -11,6 +11,8 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.text.SimpleDateFormat; +import java.util.Date; import java.util.List; import com.lyncode.xoai.dataprovider.xml.xoai.Element; @@ -21,6 +23,9 @@ import org.dspace.app.util.factory.UtilServiceFactory; import org.dspace.app.util.service.MetadataExposureService; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Item; @@ -34,6 +39,9 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.Utils; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.xoai.data.DSpaceItem; @@ -57,6 +65,9 @@ public class ItemUtils { private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + private static final AuthorizeService authorizeService = + AuthorizeServiceFactory.getInstance().getAuthorizeService(); + private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); /** @@ -141,6 +152,9 @@ private static Element createBundlesElement(Context context, Item item) throws S if (description != null) { bitstream.getField().add(createValue("description", description)); } + // Add bitstream embargo information (READ policy present, for Anonymous group with a start date) + addEmbargoField(context, bit, bitstream); + bitstream.getField().add(createValue("format", bit.getFormat(context).getMIMEType())); bitstream.getField().add(createValue("size", "" + bit.getSizeBytes())); bitstream.getField().add(createValue("url", url)); @@ -153,6 +167,24 @@ private static Element createBundlesElement(Context context, Item item) throws S return bundles; } + private static void addEmbargoField(Context context, Bitstream bit, Element bitstream) throws SQLException { + GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + List policies = authorizeService.findPoliciesByDSOAndType(context, bit, ResourcePolicy.TYPE_CUSTOM); + + for (ResourcePolicy policy : policies) { + if (policy.getGroup() == anonymousGroup && policy.getAction() == Constants.READ) { + Date startDate = policies.get(0).getStartDate(); + + if (startDate != 
null && startDate.after(new Date())) { + SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); + bitstream.getField().add( + createValue("embargo", formatter.format(startDate))); + } + } + } + } + private static Element createLicenseElement(Context context, Item item) throws SQLException, AuthorizeException, IOException { Element license = create("license"); diff --git a/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl b/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl index a3a4e6667046..b9d81aef5da8 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl @@ -123,6 +123,11 @@ + + + + From c5ca59f2e53fa611ad9e9d9ab0f1a382b5db7368 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 28 Jun 2023 19:27:35 +0100 Subject: [PATCH 394/686] ItemUtils.java: added method doc (cherry picked from commit 51e60fbcf92ea731c4e355c9cf080d251ffbf68f) --- .../main/java/org/dspace/xoai/util/ItemUtils.java | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 1733440afd95..6821e1ced0ec 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -167,10 +167,17 @@ private static Element createBundlesElement(Context context, Item item) throws S return bundles; } - private static void addEmbargoField(Context context, Bitstream bit, Element bitstream) throws SQLException { + /** + * This method will add embargo metadata for all bitstreams with an active embargo + * @param context + * @param bitstream the bitstream object + * @param bitstreamEl the bitstream metadata object to add embargo value to + * @throws SQLException + */ + private static void addEmbargoField(Context context, Bitstream bitstream, Element bitstreamEl) throws SQLException { GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); - List policies = authorizeService.findPoliciesByDSOAndType(context, bit, ResourcePolicy.TYPE_CUSTOM); + List policies = authorizeService.findPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_CUSTOM); for (ResourcePolicy policy : policies) { if (policy.getGroup() == anonymousGroup && policy.getAction() == Constants.READ) { @@ -178,7 +185,7 @@ private static void addEmbargoField(Context context, Bitstream bit, Element bits if (startDate != null && startDate.after(new Date())) { SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - bitstream.getField().add( + bitstreamEl.getField().add( createValue("embargo", formatter.format(startDate))); } } From 3f5bfe4aa4303fe4785164c8d3014408cc46b28b Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Thu, 29 Jun 2023 09:06:08 +0100 Subject: [PATCH 395/686] ItemUtils.java: improved method to account for multiple embargo policies and select the longest embargo (cherry picked from commit 538be7f09ba790a4ab7099e7027e1e8f6a9c62ea) --- .../java/org/dspace/xoai/util/ItemUtils.java | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 6821e1ced0ec..2d252ff47698 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ 
b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -14,6 +14,8 @@ import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; +import java.util.ArrayList; +import java.util.Collections; import com.lyncode.xoai.dataprovider.xml.xoai.Element; import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; @@ -168,7 +170,8 @@ private static Element createBundlesElement(Context context, Item item) throws S } /** - * This method will add embargo metadata for all bitstreams with an active embargo + * This method will add embargo metadata for a give bitstream with an active embargo. + * It will parse of relevant policies and select the longest active embargo * @param context * @param bitstream the bitstream object * @param bitstreamEl the bitstream metadata object to add embargo value to @@ -179,17 +182,23 @@ private static void addEmbargoField(Context context, Bitstream bitstream, Elemen Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); List policies = authorizeService.findPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_CUSTOM); + List embargoDates = new ArrayList<>(); + // Account for cases where there could be more than one embargo policy for (ResourcePolicy policy : policies) { if (policy.getGroup() == anonymousGroup && policy.getAction() == Constants.READ) { - Date startDate = policies.get(0).getStartDate(); - + Date startDate = policy.getStartDate(); if (startDate != null && startDate.after(new Date())) { - SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - bitstreamEl.getField().add( - createValue("embargo", formatter.format(startDate))); + embargoDates.add(startDate); } } } + if (embargoDates.size() >= 1) { + // Sort array of dates to extract the longest embargo + SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); + Collections.sort(embargoDates, Date::compareTo); + bitstreamEl.getField().add( + createValue("embargo", formatter.format(embargoDates.get(embargoDates.size() - 1)))); + } } private static Element createLicenseElement(Context context, Item item) From f88207792ad2bde02b28a9b7a834823f5ffc71dd Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 2 Aug 2023 15:24:29 +0100 Subject: [PATCH 396/686] Refactored access-status to include embargo date based on the DefaultAccessStatusHelper logic (look at primary or first bitstream) (cherry picked from commit 895926f021a355181faef47b5c41e78031700475) --- .../access/status/AccessStatusHelper.java | 10 +++ .../status/AccessStatusServiceImpl.java | 5 ++ .../status/DefaultAccessStatusHelper.java | 89 ++++++++++++++++++- .../status/service/AccessStatusService.java | 12 +++ .../status/DefaultAccessStatusHelperTest.java | 7 ++ .../AccessStatusElementItemCompilePlugin.java | 14 +++ .../oai/metadataFormats/uketd_dc.xsl | 11 +-- 7 files changed, 139 insertions(+), 9 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java index 1cacbf6aedf6..d847e907b403 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java @@ -27,4 +27,14 @@ public interface AccessStatusHelper { */ public String getAccessStatusFromItem(Context context, Item item, Date threshold) throws SQLException; + + /** + * Retrieve embargo information for the item + * + * @param context the DSpace context + * @param item the item to check for embargo information + * 
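// Worked example for the embargo selection above: if a bitstream carries two anonymous
// READ policies with start dates 2025-01-01 and 2026-06-30, both still in the future,
// the sorted list ends with 2026-06-30, so that is the value written to the bitstream's
// "embargo" field; in other words, the longest (latest-starting) embargo wins.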
@return an embargo date + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public String getEmbargoFromItem(Context context, Item item) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java index 544dc99cb4dd..f0f68b22a195 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java @@ -63,4 +63,9 @@ public void init() throws Exception { public String getAccessStatus(Context context, Item item) throws SQLException { return helper.getAccessStatusFromItem(context, item, forever_date); } + + @Override + public String getEmbargoFromItem(Context context, Item item) throws SQLException { + return helper.getEmbargoFromItem(context, item); + } } diff --git a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java index a67fa67af3b9..e7055181aa95 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java +++ b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java @@ -26,6 +26,7 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.eperson.Group; +import org.joda.time.LocalDate; /** * Default plugin implementation of the access status helper. @@ -33,6 +34,11 @@ * calculate the access status of an item based on the policies of * the primary or the first bitstream in the original bundle. * Users can override this method for enhanced functionality. + * + * The getEmbargoInformationFromItem method provides a simple logic to + * * retrieve embargo information of bitstreams from an item based on the policies of + * * the primary or the first bitstream in the original bundle. + * * Users can override this method for enhanced functionality. */ public class DefaultAccessStatusHelper implements AccessStatusHelper { public static final String EMBARGO = "embargo"; @@ -54,12 +60,12 @@ public DefaultAccessStatusHelper() { /** * Look at the item's policies to determine an access status value. - * It is also considering a date threshold for embargos and restrictions. + * It is also considering a date threshold for embargoes and restrictions. * * If the item is null, simply returns the "unknown" value. 
* * @param context the DSpace context - * @param item the item to embargo + * @param item the item to check for embargoes * @param threshold the embargo threshold date * @return an access status value */ @@ -86,7 +92,7 @@ public String getAccessStatusFromItem(Context context, Item item, Date threshold .findFirst() .orElse(null); } - return caculateAccessStatusForDso(context, bitstream, threshold); + return calculateAccessStatusForDso(context, bitstream, threshold); } /** @@ -104,7 +110,7 @@ public String getAccessStatusFromItem(Context context, Item item, Date threshold * @param threshold the embargo threshold date * @return an access status value */ - private String caculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold) + private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold) throws SQLException { if (dso == null) { return METADATA_ONLY; @@ -156,4 +162,79 @@ private String caculateAccessStatusForDso(Context context, DSpaceObject dso, Dat } return RESTRICTED; } + + /** + * Look at the policies of the primary (or first) bitstream of the item to retrieve its embargo. + * + * If the item is null, simply returns an empty map with no embargo information. + * + * @param context the DSpace context + * @param item the item to embargo + * @return an access status value + */ + @Override + public String getEmbargoFromItem(Context context, Item item) + throws SQLException { + Date embargoDate; + + if (item == null) { + return null; + } + // Consider only the original bundles. + List bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME); + // Check for primary bitstreams first. + Bitstream bitstream = bundles.stream() + .map(bundle -> bundle.getPrimaryBitstream()) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); + if (bitstream == null) { + // If there is no primary bitstream, + // take the first bitstream in the bundles. + bitstream = bundles.stream() + .map(bundle -> bundle.getBitstreams()) + .flatMap(List::stream) + .findFirst() + .orElse(null); + } + + embargoDate = this.retrieveLongestEmbargo(context, bitstream); + + return embargoDate != null ? embargoDate.toString() : null; + } + + /** + * + */ + private Date retrieveLongestEmbargo(Context context, Bitstream bitstream) throws SQLException { + Date embargoDate = null; + // Only consider read policies. + List policies = authorizeService + .getPoliciesActionFilter(context, bitstream, Constants.READ); + + // Looks at all read policies. + for (ResourcePolicy policy : policies) { + boolean isValid = resourcePolicyService.isDateValid(policy); + Group group = policy.getGroup(); + + if (group != null && StringUtils.equals(group.getName(), Group.ANONYMOUS)) { + // Only calculate the status for the anonymous group. + if (!isValid) { + // If the policy is not valid there is an active embargo + Date startDate = policy.getStartDate(); + + if (startDate != null && !startDate.before(LocalDate.now().toDate())) { + // There is an active embargo: aim to take the longest embargo + if (embargoDate == null) { + embargoDate = startDate; + } else { + embargoDate = startDate.after(embargoDate) ? 
startDate : embargoDate; + } + } + } + } + } + + return embargoDate; + } } diff --git a/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java index 43de5e3c47f1..937cb02692ff 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java +++ b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java @@ -8,6 +8,7 @@ package org.dspace.access.status.service; import java.sql.SQLException; +import java.util.Date; import org.dspace.content.Item; import org.dspace.core.Context; @@ -40,7 +41,18 @@ public interface AccessStatusService { * * @param context the DSpace context * @param item the item + * @return an access status value * @throws SQLException An exception that provides information on a database access error or other errors. */ public String getAccessStatus(Context context, Item item) throws SQLException; + + /** + * Retrieve embargo information for the item + * + * @param context the DSpace context + * @param item the item to check for embargo information + * @return an embargo date + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public String getEmbargoFromItem(Context context, Item item) throws SQLException; } diff --git a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java index a41e985deb32..9d90452beeda 100644 --- a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java +++ b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java @@ -8,6 +8,7 @@ package org.dspace.access.status; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.fail; @@ -273,6 +274,8 @@ public void testWithEmbargo() throws Exception { context.restoreAuthSystemState(); String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold); assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo); + assertThat("testWithEmbargo 1", embargoDate, equalTo(policy.getStartDate().toString())); } /** @@ -390,6 +393,8 @@ public void testWithPrimaryAndMultipleBitstreams() throws Exception { context.restoreAuthSystemState(); String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); assertThat("testWithPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + String embargoDate = helper.getEmbargoFromItem(context, itemWithPrimaryAndMultipleBitstreams); + assertThat("testWithPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(policy.getStartDate().toString())); } /** @@ -419,5 +424,7 @@ public void testWithNoPrimaryAndMultipleBitstreams() throws Exception { context.restoreAuthSystemState(); String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold); assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo); + assertThat("testWithNoPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(null)); } } diff --git 
a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java index 6b3c5ded9882..3201a0229178 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java @@ -12,6 +12,7 @@ import com.lyncode.xoai.dataprovider.xml.xoai.Element; import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; +import org.apache.commons.lang3.StringUtils; import org.dspace.access.status.factory.AccessStatusServiceFactory; import org.dspace.access.status.service.AccessStatusService; import org.dspace.content.Item; @@ -31,6 +32,13 @@ * open.access * * + * OR + * + * + * embargo + * 2024-10-10 + * + * * } * * Returning Values are based on: @@ -46,9 +54,15 @@ public Metadata additionalMetadata(Context context, Metadata metadata, Item item String accessStatusType; accessStatusType = accessStatusService.getAccessStatus(context, item); + String embargoFromItem = accessStatusService.getEmbargoFromItem(context, item); + Element accessStatus = ItemUtils.create("access-status"); accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType)); + if (StringUtils.isNotEmpty(embargoFromItem)) { + accessStatus.getField().add(ItemUtils.createValue("embargo", embargoFromItem)); + } + Element others; List elements = metadata.getElement(); if (ItemUtils.getElement(elements, "others") != null) { diff --git a/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl b/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl index b9d81aef5da8..a180b49c561b 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl @@ -115,6 +115,12 @@ + + + + + + @@ -123,11 +129,6 @@ - - - - From f330e36528f9b3a28c7fa78fff19c684ff9c49a9 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 2 Aug 2023 16:22:54 +0100 Subject: [PATCH 397/686] Remove unused imports (cherry picked from commit 4bd2cfdf0f931aec7a05db42f255423fe806ea77) --- .../org/dspace/access/status/service/AccessStatusService.java | 1 - .../org/dspace/access/status/DefaultAccessStatusHelperTest.java | 1 - 2 files changed, 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java index 937cb02692ff..2ed47bde4cd2 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java +++ b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java @@ -8,7 +8,6 @@ package org.dspace.access.status.service; import java.sql.SQLException; -import java.util.Date; import org.dspace.content.Item; import org.dspace.core.Context; diff --git a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java index 9d90452beeda..f450f72e6a81 100644 --- a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java +++ b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java @@ -8,7 +8,6 @@ package org.dspace.access.status; import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; import static 
org.junit.Assert.fail; From beaf40dd9bc48754f6883d7da7d18821a7b9c8a0 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 2 Aug 2023 18:01:07 +0100 Subject: [PATCH 398/686] Fix style issues (cherry picked from commit 724a4ffb0ed9ffefb2866930655767590b462bb5) --- .../src/main/java/org/dspace/xoai/util/ItemUtils.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 2d252ff47698..2af526c56046 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -13,9 +13,9 @@ import java.sql.SQLException; import java.text.SimpleDateFormat; import java.util.Date; -import java.util.List; import java.util.ArrayList; import java.util.Collections; +import java.util.List; import com.lyncode.xoai.dataprovider.xml.xoai.Element; import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; @@ -180,7 +180,9 @@ private static Element createBundlesElement(Context context, Item item) throws S private static void addEmbargoField(Context context, Bitstream bitstream, Element bitstreamEl) throws SQLException { GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); - List policies = authorizeService.findPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_CUSTOM); + List policies = authorizeService.findPoliciesByDSOAndType(context, + bitstream, + ResourcePolicy.TYPE_CUSTOM); List embargoDates = new ArrayList<>(); // Account for cases where there could be more than one embargo policy From 31251183f3f9c958872787283f1adf1c75bb15ef Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 2 Aug 2023 18:56:05 +0100 Subject: [PATCH 399/686] Fix style issues (cherry picked from commit 6e2c8a4ae0068d844d0fc796001c170c8849babf) --- dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 2af526c56046..6a0808259e8e 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -12,9 +12,9 @@ import java.io.InputStream; import java.sql.SQLException; import java.text.SimpleDateFormat; -import java.util.Date; import java.util.ArrayList; import java.util.Collections; +import java.util.Date; import java.util.List; import com.lyncode.xoai.dataprovider.xml.xoai.Element; From d154936a3d9eb4cf48d323da15ac55623293cee7 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Thu, 3 Aug 2023 08:54:01 +0100 Subject: [PATCH 400/686] Add null check (cherry picked from commit 0de4c3945ed7f30d41841cda4bf01acf9ffc130f) --- .../org/dspace/access/status/DefaultAccessStatusHelper.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java index e7055181aa95..9b5227491bc0 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java +++ b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java @@ -198,6 +198,10 @@ public String getEmbargoFromItem(Context context, Item item) .orElse(null); } + if (bitstream == null) { 
+ return null; + } + embargoDate = this.retrieveLongestEmbargo(context, bitstream); return embargoDate != null ? embargoDate.toString() : null; From 08e82ebf28eee4ea9a8517308815e7ae77a38a6e Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Thu, 3 Aug 2023 09:54:00 +0100 Subject: [PATCH 401/686] ItemUtils.java: refactored addEmbargoField (cherry picked from commit 291afa765d29836a67727fdd2f82ac0c9f9310c4) --- .../java/org/dspace/xoai/util/ItemUtils.java | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 6a0808259e8e..80eb67a2b994 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -12,8 +12,6 @@ import java.io.InputStream; import java.sql.SQLException; import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collections; import java.util.Date; import java.util.List; @@ -184,22 +182,28 @@ private static void addEmbargoField(Context context, Bitstream bitstream, Elemen bitstream, ResourcePolicy.TYPE_CUSTOM); - List embargoDates = new ArrayList<>(); + Date embargoDate = null; + // Account for cases where there could be more than one embargo policy for (ResourcePolicy policy : policies) { if (policy.getGroup() == anonymousGroup && policy.getAction() == Constants.READ) { Date startDate = policy.getStartDate(); if (startDate != null && startDate.after(new Date())) { - embargoDates.add(startDate); + // There is an active embargo: aim to take the longest embargo + if (embargoDate == null) { + embargoDate = startDate; + } else { + embargoDate = startDate.after(embargoDate) ? startDate : embargoDate; + } } } } - if (embargoDates.size() >= 1) { + + if (embargoDate != null) { // Sort array of dates to extract the longest embargo SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - Collections.sort(embargoDates, Date::compareTo); bitstreamEl.getField().add( - createValue("embargo", formatter.format(embargoDates.get(embargoDates.size() - 1)))); + createValue("embargo", formatter.format(embargoDate))); } } From 9a51fb87688f967a747d5c6f76321ee0a5a60ec3 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Thu, 3 Aug 2023 14:30:33 +0100 Subject: [PATCH 402/686] uketd_dc.xsl: also expose access-status if embargo or restricted (cherry picked from commit 4b40872a6d5a3934c1f79c6babf439a21ce25f66) --- dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl b/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl index a180b49c561b..5c434e49ed35 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/uketd_dc.xsl @@ -115,6 +115,14 @@ + + + + + + + + From db733101b6e670755e56f23b44064b20c7bacb25 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Thu, 14 Sep 2023 10:02:24 +0100 Subject: [PATCH 403/686] DefaultAccessStatusHelper: fix logic to take shortest embargo (cherry picked from commit d17ef09082aa237cffdc928d9560667487c2c976) --- .../access/status/DefaultAccessStatusHelper.java | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java index 
9b5227491bc0..05f0757060ab 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java +++ b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java @@ -175,7 +175,7 @@ private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Da @Override public String getEmbargoFromItem(Context context, Item item) throws SQLException { - Date embargoDate; + Date embargoedDate; if (item == null) { return null; @@ -202,15 +202,15 @@ public String getEmbargoFromItem(Context context, Item item) return null; } - embargoDate = this.retrieveLongestEmbargo(context, bitstream); + embargoedDate = this.retrieveShortestEmbargo(context, bitstream); - return embargoDate != null ? embargoDate.toString() : null; + return embargoedDate != null ? embargoedDate.toString() : null; } /** * */ - private Date retrieveLongestEmbargo(Context context, Bitstream bitstream) throws SQLException { + private Date retrieveShortestEmbargo(Context context, Bitstream bitstream) throws SQLException { Date embargoDate = null; // Only consider read policies. List policies = authorizeService @@ -228,11 +228,12 @@ private Date retrieveLongestEmbargo(Context context, Bitstream bitstream) throws Date startDate = policy.getStartDate(); if (startDate != null && !startDate.before(LocalDate.now().toDate())) { - // There is an active embargo: aim to take the longest embargo + // There is an active embargo: aim to take the shortest embargo (account for rare cases where + // more than one resource policy exists) if (embargoDate == null) { embargoDate = startDate; } else { - embargoDate = startDate.after(embargoDate) ? startDate : embargoDate; + embargoDate = startDate.before(embargoDate) ? startDate : embargoDate; } } } From a211edaeb282b1301936eddab114b0ca0ae6b8ad Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Thu, 14 Sep 2023 16:39:39 +0100 Subject: [PATCH 404/686] Remove currently unused customisation of ItemUtils (cherry picked from commit 490a982e8055991a6b8cbacece22b924466e22df) --- .../java/org/dspace/xoai/util/ItemUtils.java | 54 ------------------- 1 file changed, 54 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 80eb67a2b994..35bef8c8d77f 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -11,8 +11,6 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; -import java.text.SimpleDateFormat; -import java.util.Date; import java.util.List; import com.lyncode.xoai.dataprovider.xml.xoai.Element; @@ -23,9 +21,6 @@ import org.dspace.app.util.factory.UtilServiceFactory; import org.dspace.app.util.service.MetadataExposureService; import org.dspace.authorize.AuthorizeException; -import org.dspace.authorize.ResourcePolicy; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Item; @@ -39,9 +34,6 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.Utils; -import org.dspace.eperson.Group; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.eperson.service.GroupService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.xoai.data.DSpaceItem; @@ 
-65,9 +57,6 @@ public class ItemUtils { private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); - private static final AuthorizeService authorizeService = - AuthorizeServiceFactory.getInstance().getAuthorizeService(); - private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); /** @@ -152,9 +141,6 @@ private static Element createBundlesElement(Context context, Item item) throws S if (description != null) { bitstream.getField().add(createValue("description", description)); } - // Add bitstream embargo information (READ policy present, for Anonymous group with a start date) - addEmbargoField(context, bit, bitstream); - bitstream.getField().add(createValue("format", bit.getFormat(context).getMIMEType())); bitstream.getField().add(createValue("size", "" + bit.getSizeBytes())); bitstream.getField().add(createValue("url", url)); @@ -167,46 +153,6 @@ private static Element createBundlesElement(Context context, Item item) throws S return bundles; } - /** - * This method will add embargo metadata for a give bitstream with an active embargo. - * It will parse of relevant policies and select the longest active embargo - * @param context - * @param bitstream the bitstream object - * @param bitstreamEl the bitstream metadata object to add embargo value to - * @throws SQLException - */ - private static void addEmbargoField(Context context, Bitstream bitstream, Element bitstreamEl) throws SQLException { - GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); - Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); - List policies = authorizeService.findPoliciesByDSOAndType(context, - bitstream, - ResourcePolicy.TYPE_CUSTOM); - - Date embargoDate = null; - - // Account for cases where there could be more than one embargo policy - for (ResourcePolicy policy : policies) { - if (policy.getGroup() == anonymousGroup && policy.getAction() == Constants.READ) { - Date startDate = policy.getStartDate(); - if (startDate != null && startDate.after(new Date())) { - // There is an active embargo: aim to take the longest embargo - if (embargoDate == null) { - embargoDate = startDate; - } else { - embargoDate = startDate.after(embargoDate) ? 
startDate : embargoDate; - } - } - } - } - - if (embargoDate != null) { - // Sort array of dates to extract the longest embargo - SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - bitstreamEl.getField().add( - createValue("embargo", formatter.format(embargoDate))); - } - } - private static Element createLicenseElement(Context context, Item item) throws SQLException, AuthorizeException, IOException { Element license = create("license"); From 572ebb5b2704b3cce43290c00c443de69fee96fd Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 20 Sep 2023 16:26:14 +0100 Subject: [PATCH 405/686] DefaultAccessStatusHelper: getEmbargoFromItem return null embargo if status than embargo (cherry picked from commit e05e73a112ce60bd0689ce68af442382712bd5fc) --- .../dspace/access/status/AccessStatusHelper.java | 4 +++- .../access/status/AccessStatusServiceImpl.java | 2 +- .../access/status/DefaultAccessStatusHelper.java | 13 ++++++++----- .../status/DefaultAccessStatusHelperTest.java | 6 +++--- 4 files changed, 15 insertions(+), 10 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java index d847e907b403..2d782dc3b82a 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java @@ -22,6 +22,7 @@ public interface AccessStatusHelper { * * @param context the DSpace context * @param item the item + * @param threshold the embargo threshold date * @return an access status value * @throws SQLException An exception that provides information on a database access error or other errors. */ @@ -33,8 +34,9 @@ public String getAccessStatusFromItem(Context context, Item item, Date threshold * * @param context the DSpace context * @param item the item to check for embargo information + * @param threshold the embargo threshold date * @return an embargo date * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ - public String getEmbargoFromItem(Context context, Item item) throws SQLException; + public String getEmbargoFromItem(Context context, Item item, Date threshold) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java index f0f68b22a195..e1f11285d840 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java @@ -66,6 +66,6 @@ public String getAccessStatus(Context context, Item item) throws SQLException { @Override public String getEmbargoFromItem(Context context, Item item) throws SQLException { - return helper.getEmbargoFromItem(context, item); + return helper.getEmbargoFromItem(context, item, forever_date); } } diff --git a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java index 05f0757060ab..5f0e6d8b259b 100644 --- a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java +++ b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java @@ -173,11 +173,14 @@ private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Da * @return an access status value */ @Override - public String getEmbargoFromItem(Context context, Item item) + public String getEmbargoFromItem(Context context, Item item, Date threshold) throws SQLException { - Date embargoedDate; + Date embargoDate; - if (item == null) { + // If Item status is not "embargo" then return a null embargo date. + String accessStatus = getAccessStatusFromItem(context, item, threshold); + + if (item == null || !accessStatus.equals(EMBARGO)) { return null; } // Consider only the original bundles. @@ -202,9 +205,9 @@ public String getEmbargoFromItem(Context context, Item item) return null; } - embargoedDate = this.retrieveShortestEmbargo(context, bitstream); + embargoDate = this.retrieveShortestEmbargo(context, bitstream); - return embargoedDate != null ? embargoedDate.toString() : null; + return embargoDate != null ? 
embargoDate.toString() : null; } /** diff --git a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java index f450f72e6a81..1134990e84f4 100644 --- a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java +++ b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java @@ -273,7 +273,7 @@ public void testWithEmbargo() throws Exception { context.restoreAuthSystemState(); String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold); assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); - String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo); + String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo, threshold); assertThat("testWithEmbargo 1", embargoDate, equalTo(policy.getStartDate().toString())); } @@ -392,7 +392,7 @@ public void testWithPrimaryAndMultipleBitstreams() throws Exception { context.restoreAuthSystemState(); String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); assertThat("testWithPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); - String embargoDate = helper.getEmbargoFromItem(context, itemWithPrimaryAndMultipleBitstreams); + String embargoDate = helper.getEmbargoFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); assertThat("testWithPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(policy.getStartDate().toString())); } @@ -423,7 +423,7 @@ public void testWithNoPrimaryAndMultipleBitstreams() throws Exception { context.restoreAuthSystemState(); String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold); assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); - String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo); + String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo, threshold); assertThat("testWithNoPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(null)); } } From 99b47939c6975a700494b8585538ce994c58d3e9 Mon Sep 17 00:00:00 2001 From: eskander Date: Thu, 28 Sep 2023 18:52:06 +0300 Subject: [PATCH 406/686] [CST-11738] ORCID lookup with more data --- dspace/config/modules/orcid.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/config/modules/orcid.cfg b/dspace/config/modules/orcid.cfg index bc4d8cfdd90e..a702eec3acf2 100644 --- a/dspace/config/modules/orcid.cfg +++ b/dspace/config/modules/orcid.cfg @@ -16,7 +16,7 @@ orcid.webhook.registration-mode = only_linked # your webhooks orcid.webhook.registration-token = 01dfd257-c13f-43df-a0e2-9bb6c3cc7069 -orcid.authority.prefix = will be referenced::ORCID:: +orcid.authority.prefix = will be generated::ORCID:: orcid.linkable-metadata-fields.ignore = From 33d2ee961d95b8e7506d5bedf0ec26c1341b750c Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Mon, 2 Oct 2023 17:44:50 +1300 Subject: [PATCH 407/686] Additional Item class cast fixes in handle providers DSOs were not properly checked if they were instanceof Item before attempting the cast in HandleIdentifierProvider and VersionedHandleIdentifierProviderWithCanonicalHandles --- .../org/dspace/identifier/HandleIdentifierProvider.java | 5 ++--- ...rsionedHandleIdentifierProviderWithCanonicalHandles.java | 6 +++--- 2 files changed, 5 insertions(+), 6 
deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java index 59a1e13a2166..82358362da85 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java @@ -68,10 +68,9 @@ public String register(Context context, DSpaceObject dso) { try { String id = mint(context, dso); - // move canonical to point the latest version + // Populate metadata if (dso instanceof Item || dso instanceof Collection || dso instanceof Community) { - Item item = (Item) dso; - populateHandleMetadata(context, item, id); + populateHandleMetadata(context, dso, id); } return id; diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index e6a092c47284..9993f78b4dd5 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -95,11 +95,11 @@ public String register(Context context, DSpaceObject dso) { String id = mint(context, dso); // move canonical to point the latest version - if (dso != null && dso.getType() == Constants.ITEM) { + if (dso.getType() == Constants.ITEM && dso instanceof Item) { Item item = (Item) dso; - VersionHistory history = null; + VersionHistory history; try { - history = versionHistoryService.findByItem(context, (Item) dso); + history = versionHistoryService.findByItem(context, item); } catch (SQLException ex) { throw new RuntimeException("A problem with the database connection occured.", ex); } From 1c1266b85541f9492ae0e23609c7e4e88f934c65 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 2 Oct 2023 10:55:43 -0500 Subject: [PATCH 408/686] Remove Oracle script that accidentally made it in via #8800 (cherry picked from commit 5e04edf41e452cd383597680da9c3101211156b8) --- .../V7.6_2023.04.19__process_parameters_to_text_type.sql | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql deleted file mode 100644 index 6b2dd705ea68..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql +++ /dev/null @@ -1,9 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -ALTER TABLE process MODIFY (parameters CLOB); From 2b25321362e48018c741c9561b9cdde8e6264b24 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 2 Oct 2023 18:00:09 +0200 Subject: [PATCH 409/686] 8968 - request-a-copy email: non ASCII characters are encoded as HTML character entity references (cherry picked from commit db36d5eeae3e76b61178c2c7ac4243bc2fc20a97) --- 
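The problem this patch (and the follow-ups through [PATCH 413/686]) addresses is that escapeHtml4 also maps accented characters to HTML 4 entity references, so requester names and messages arrive in the plain-text request-a-copy email as strings like "l&aacute;". A minimal standalone sketch of the difference between that behaviour and the HtmlUtils.htmlEscape(value, "UTF-8") approach the series eventually settles on, assuming commons-text and spring-web on the classpath (class name and sample string are illustrative only, not part of the patch):

import org.apache.commons.text.StringEscapeUtils;
import org.springframework.web.util.HtmlUtils;

public class RequestCopyEscapingSketch {
    public static void main(String[] args) {
        String requesterName = "lá lé pingüino & <b>yo</b>";

        // Commons Text escapeHtml4 also encodes ISO-8859-1 accented characters,
        // which then show up literally in a plain-text email body.
        System.out.println(StringEscapeUtils.escapeHtml4(requesterName));
        // -> l&aacute; l&eacute; ping&uuml;ino &amp; &lt;b&gt;yo&lt;/b&gt;

        // Spring's HtmlUtils.htmlEscape with an explicit UTF-8 encoding escapes
        // only the markup-significant characters, keeping accents readable
        // while still defusing HTML/XSS payloads.
        System.out.println(HtmlUtils.htmlEscape(requesterName, "UTF-8"));
        // -> lá lé pingüino &amp; &lt;b&gt;yo&lt;/b&gt;
    }
}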
.../dspace/app/rest/repository/RequestItemRepository.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index 7c0694c52f26..8a60867f9e99 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -173,11 +173,10 @@ public RequestItemRest createAndReturn(Context ctx) username = user.getFullName(); } else { // An anonymous session may provide a name. // Escape username to evade nasty XSS attempts - username = StringEscapeUtils.escapeHtml4(rir.getRequestName()); + username = rir.getRequestName(); } - // Requester's message text, escaped to evade nasty XSS attempts - String message = StringEscapeUtils.escapeHtml4(rir.getRequestMessage()); + String message = rir.getRequestMessage(); // Create the request. String token; From e4d54bba2701cf6aa22adb5f27af9cf5afe4ad83 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 2 Oct 2023 18:38:33 +0200 Subject: [PATCH 410/686] unused import (cherry picked from commit bf6e042085140e305d43d61ddce564fbfe819c7f) --- .../org/dspace/app/rest/repository/RequestItemRepository.java | 1 - 1 file changed, 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index 8a60867f9e99..bc276d73d5d6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -21,7 +21,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.text.StringEscapeUtils; import org.apache.commons.validator.routines.EmailValidator; import org.apache.http.client.utils.URIBuilder; import org.apache.logging.log4j.LogManager; From 8d4fd1ce6c3bd8a70673d227cbd2e95c7e7d2412 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ad=C3=A1n=20Rom=C3=A1n=20Ruiz?= Date: Fri, 6 Oct 2023 17:27:53 +0200 Subject: [PATCH 411/686] 8968 - added custom StringEscapper (cherry picked from commit 103c8ee75771d3d9e58e530b8855d07cc14598c2) --- .../org/dspace/util/StringEscapeUtils.java | 49 +++++++++++++++++++ .../repository/RequestItemRepository.java | 6 ++- 2 files changed, 53 insertions(+), 2 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java diff --git a/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java b/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java new file mode 100644 index 000000000000..86010a5c19f1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.text.translate.AggregateTranslator; +import org.apache.commons.text.translate.CharSequenceTranslator; +import org.apache.commons.text.translate.EntityArrays; +import 
org.apache.commons.text.translate.LookupTranslator; + +public class StringEscapeUtils extends org.apache.commons.text.StringEscapeUtils { + public static final CharSequenceTranslator ESCAPE_MAIL; + static { + final Map escapeMailMap = new HashMap<>(); + escapeMailMap.put("#", "#"); + ESCAPE_MAIL = new AggregateTranslator( + new LookupTranslator(EntityArrays.BASIC_ESCAPE), + new LookupTranslator(EntityArrays.APOS_ESCAPE), + new LookupTranslator(Collections.unmodifiableMap(escapeMailMap)) + ); + } + + /** + * Escapes the characters in a {@code String} using custom rules to avoid XSS attacks. + * + *
<p>Escapes user-entered text that is sent with mail to avoid possible XSS attacks. + * It escapes double-quote, ampersand, less-than, greater-than, apostrophe, number sign (", &, <, >,',#)</p> + * + * <p>Example:</p> + * <pre> + * input string: <div attr="*x" onblur="alert(1)*"> lá lé lí ló LÚ pingüino & yo # </div>!!" + * output string: &lt;div attr=&quot;*x&quot; onblur=&quot;alert(1)*&quot;&gt; lá lé lí ló LÚ pingüino &amp; yo # &lt;/div&gt;!! + * </pre>
    + * + * @param input String to escape values in, may be null + * @return String with escaped values, {@code null} if null string input + */ + public static final String escapeMail(final String input) { + return ESCAPE_MAIL.translate(input); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index bc276d73d5d6..863a5c414666 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -42,6 +42,7 @@ import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.services.ConfigurationService; +import org.dspace.util.StringEscapeUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -172,10 +173,11 @@ public RequestItemRest createAndReturn(Context ctx) username = user.getFullName(); } else { // An anonymous session may provide a name. // Escape username to evade nasty XSS attempts - username = rir.getRequestName(); + username = StringEscapeUtils.escapeMail(rir.getRequestName()); } - String message = rir.getRequestMessage(); + // Requester's message text, escaped to evade nasty XSS attempts + String message = StringEscapeUtils.escapeMail(rir.getRequestMessage()); // Create the request. String token; From 36ad3de11fdedad0f8a7dfb6f16f3b1403accaa1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ad=C3=A1n=20Rom=C3=A1n=20Ruiz?= Date: Fri, 6 Oct 2023 17:42:14 +0200 Subject: [PATCH 412/686] checkstyle (cherry picked from commit 2c2b3b18dc781054539add48ca74e4bf688c400c) --- .../src/main/java/org/dspace/util/StringEscapeUtils.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java b/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java index 86010a5c19f1..dfc89ca1941f 100644 --- a/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java +++ b/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java @@ -37,7 +37,8 @@ public class StringEscapeUtils extends org.apache.commons.text.StringEscapeUtils *
<p>Example:</p> * <pre> * input string: <div attr="*x" onblur="alert(1)*"> lá lé lí ló LÚ pingüino & yo # </div>!!" - * output string: &lt;div attr=&quot;*x&quot; onblur=&quot;alert(1)*&quot;&gt; lá lé lí ló LÚ pingüino &amp; yo # &lt;/div&gt;!! + * output string: &lt;div attr=&quot;*x&quot; onblur=&quot;alert(1)*&quot;&gt; lá lé lí ló LÚ + * pingüino &amp; yo # &lt;/div&gt;!! * </pre>
    * * @param input String to escape values in, may be null From 11dc69ac349c3d8125aaf1102396c97ebca5e5ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ad=C3=A1n=20Rom=C3=A1n=20Ruiz?= Date: Mon, 9 Oct 2023 18:01:46 +0200 Subject: [PATCH 413/686] 8968 - implementated using HtmlUtils scaping (cherry picked from commit 090beedb6f692df29d1a61d4c2e6fde09d4b4c1d) --- .../org/dspace/util/StringEscapeUtils.java | 50 ------------------- .../repository/RequestItemRepository.java | 8 +-- 2 files changed, 4 insertions(+), 54 deletions(-) delete mode 100644 dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java diff --git a/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java b/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java deleted file mode 100644 index dfc89ca1941f..000000000000 --- a/dspace-api/src/main/java/org/dspace/util/StringEscapeUtils.java +++ /dev/null @@ -1,50 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.util; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.text.translate.AggregateTranslator; -import org.apache.commons.text.translate.CharSequenceTranslator; -import org.apache.commons.text.translate.EntityArrays; -import org.apache.commons.text.translate.LookupTranslator; - -public class StringEscapeUtils extends org.apache.commons.text.StringEscapeUtils { - public static final CharSequenceTranslator ESCAPE_MAIL; - static { - final Map escapeMailMap = new HashMap<>(); - escapeMailMap.put("#", "#"); - ESCAPE_MAIL = new AggregateTranslator( - new LookupTranslator(EntityArrays.BASIC_ESCAPE), - new LookupTranslator(EntityArrays.APOS_ESCAPE), - new LookupTranslator(Collections.unmodifiableMap(escapeMailMap)) - ); - } - - /** - * Escapes the characters in a {@code String} using custom rules to avoid XSS attacks. - * - *
<p>Escapes user-entered text that is sent with mail to avoid possible XSS attacks. - * It escapes double-quote, ampersand, less-than, greater-than, apostrophe, number sign (", &, <, >,',#)</p> - * - * <p>Example:</p> - * <pre> - * input string: <div attr="*x" onblur="alert(1)*"> lá lé lí ló LÚ pingüino & yo # </div>!!" - * output string: &lt;div attr=&quot;*x&quot; onblur=&quot;alert(1)*&quot;&gt; lá lé lí ló LÚ - * pingüino &amp; yo # &lt;/div&gt;!! - * </pre>
    - * - * @param input String to escape values in, may be null - * @return String with escaped values, {@code null} if null string input - */ - public static final String escapeMail(final String input) { - return ESCAPE_MAIL.translate(input); - } -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index 863a5c414666..945afe16e82c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -42,13 +42,13 @@ import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.services.ConfigurationService; -import org.dspace.util.StringEscapeUtils; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; - +import org.springframework.web.util.HtmlUtils; /** * Component to expose item requests. * @@ -173,11 +173,11 @@ public RequestItemRest createAndReturn(Context ctx) username = user.getFullName(); } else { // An anonymous session may provide a name. // Escape username to evade nasty XSS attempts - username = StringEscapeUtils.escapeMail(rir.getRequestName()); + username = HtmlUtils.htmlEscape(rir.getRequestName(),"UTF-8"); } // Requester's message text, escaped to evade nasty XSS attempts - String message = StringEscapeUtils.escapeMail(rir.getRequestMessage()); + String message = HtmlUtils.htmlEscape(rir.getRequestMessage(),"UTF-8"); // Create the request. String token; From d957dc2007f6f71e1a279191f202f316ff601d30 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ad=C3=A1n=20Rom=C3=A1n=20Ruiz?= Date: Mon, 9 Oct 2023 18:18:35 +0200 Subject: [PATCH 414/686] checkstiye (cherry picked from commit d12fbe2c340e18e42dba4380ee9976bccb4ca421) --- .../org/dspace/app/rest/repository/RequestItemRepository.java | 1 - 1 file changed, 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index 945afe16e82c..f45dbee66f34 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -42,7 +42,6 @@ import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.services.ConfigurationService; - import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; From c8e4a0fd90358d42048f97619fbbda744dd1564d Mon Sep 17 00:00:00 2001 From: "Mark H. 
Wood" Date: Thu, 12 Oct 2023 16:45:48 -0400 Subject: [PATCH 415/686] Define _version_ --- dspace/solr/authority/conf/schema.xml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/dspace/solr/authority/conf/schema.xml b/dspace/solr/authority/conf/schema.xml index 6c32819302d0..511dbabd477c 100644 --- a/dspace/solr/authority/conf/schema.xml +++ b/dspace/solr/authority/conf/schema.xml @@ -87,9 +87,20 @@ + + + From 2fed310bedd272b0c0e1ed1af6f2894dca7bb464 Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 13 Oct 2023 10:56:50 +0300 Subject: [PATCH 416/686] [CST-11738] import more information from ORCID Person --- .../ExternalDataProviderImportFiller.java | 7 +- .../impl/OrcidV3AuthorDataProvider.java | 81 ++++++++++++++++--- .../external/provider/orcid/xml/XMLtoBio.java | 12 +++ .../config/spring/api/external-services.xml | 5 ++ 4 files changed, 92 insertions(+), 13 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authority/filler/ExternalDataProviderImportFiller.java b/dspace-api/src/main/java/org/dspace/authority/filler/ExternalDataProviderImportFiller.java index ef218c76fb34..7a7d10e63499 100644 --- a/dspace-api/src/main/java/org/dspace/authority/filler/ExternalDataProviderImportFiller.java +++ b/dspace-api/src/main/java/org/dspace/authority/filler/ExternalDataProviderImportFiller.java @@ -7,7 +7,6 @@ */ package org.dspace.authority.filler; -import static org.apache.commons.collections.CollectionUtils.isEmpty; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.removeStart; import static org.apache.commons.lang3.StringUtils.startsWith; @@ -110,7 +109,11 @@ private void enrichItemWithExternalData(Context context, Item item, ExternalData } private boolean notAlreadyPresent(Item item, MetadataValueDTO value) { - return isEmpty(itemService.getMetadata(item, value.getSchema(), value.getElement(), value.getQualifier(), ANY)); + List metadataValues = itemService.getMetadata(item, value.getSchema(), + value.getElement(), value.getQualifier(), ANY); + + return metadataValues.stream().noneMatch(metadataValue -> + metadataValue.getValue().equals(value.getValue())); } private boolean isTitleNotSet(Item item) { diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java index 7a836113936c..a07cf89c503e 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java @@ -16,6 +16,7 @@ import java.util.Collections; import java.util.LinkedList; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -27,6 +28,7 @@ import org.apache.http.impl.client.HttpClientBuilder; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.content.MetadataFieldName; import org.dspace.content.dto.MetadataValueDTO; import org.dspace.external.OrcidRestConnector; import org.dspace.external.model.ExternalDataObject; @@ -35,6 +37,7 @@ import org.json.JSONObject; import org.orcid.jaxb.model.v3.release.common.OrcidIdentifier; import org.orcid.jaxb.model.v3.release.record.Person; +import org.orcid.jaxb.model.v3.release.record.Record; import org.orcid.jaxb.model.v3.release.search.Result; import org.springframework.beans.factory.annotation.Autowired; @@ -60,6 
+63,8 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider { private XMLtoBio converter; + private Map externalIdentifiers; + public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})"; private static final int MAX_INDEX = 10000; @@ -113,12 +118,13 @@ public void init() throws IOException { @Override public Optional getExternalDataObject(String id) { - Person person = getBio(id); - ExternalDataObject externalDataObject = convertToExternalDataObject(person); + Record record = getBio(id); + ExternalDataObject externalDataObject = convertToExternalDataObject(record); return Optional.of(externalDataObject); } - protected ExternalDataObject convertToExternalDataObject(Person person) { + protected ExternalDataObject convertToExternalDataObject(Record record) { + Person person = record.getPerson(); ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier); if (person.getName() != null) { String lastName = ""; @@ -141,6 +147,12 @@ protected ExternalDataObject convertToExternalDataObject(Person person) { externalDataObject .addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null, orcidUrl + '/' + person.getName().getPath())); + + appendOtherNames(externalDataObject, person); + appendResearcherUrls(externalDataObject, person); + appendExternalIdentifiers(externalDataObject, person); + appendAffiliations(externalDataObject, record); + if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) { externalDataObject.setDisplayValue(lastName + ", " + firstName); externalDataObject.setValue(lastName + ", " + firstName); @@ -157,24 +169,64 @@ protected ExternalDataObject convertToExternalDataObject(Person person) { return externalDataObject; } + private void appendOtherNames(ExternalDataObject externalDataObject, Person person) { + person.getOtherNames().getOtherNames().forEach(otherName -> + externalDataObject.addMetadata(new MetadataValueDTO("crisrp", "name", "variant", null, + otherName.getContent()))); + } + + private void appendResearcherUrls(ExternalDataObject externalDataObject, Person person) { + person.getResearcherUrls().getResearcherUrls().forEach(researcherUrl -> + externalDataObject.addMetadata(new MetadataValueDTO("oairecerif", "identifier", "url", null, + researcherUrl.getUrl().getValue()))); + } + + private void appendExternalIdentifiers(ExternalDataObject externalDataObject, Person person) { + if (getExternalIdentifiers() != null) { + person.getExternalIdentifiers() + .getExternalIdentifiers() + .forEach(externalIdentifier -> { + String metadataField = externalIdentifiers.get(externalIdentifier.getType()); + if (StringUtils.isNotEmpty(metadataField)) { + MetadataFieldName field = new MetadataFieldName(metadataField); + externalDataObject.addMetadata( + new MetadataValueDTO(field.schema, field.element, field.qualifier, null, + externalIdentifier.getValue())); + } + }); + } + } + + private void appendAffiliations(ExternalDataObject externalDataObject, Record record) { + record.getActivitiesSummary() + .getEmployments() + .getEmploymentGroups() + .stream() + .flatMap(affiliationGroup -> + affiliationGroup.getActivities().stream()) + .forEach(employmentSummary -> + externalDataObject.addMetadata(new MetadataValueDTO("person", "affiliation", "name", + null, employmentSummary.getOrganization().getName()))); + } + /** - * Retrieve a Person object based on a given orcid identifier. + * Retrieve a Record object based on a given orcid identifier. 
* @param id orcid identifier - * @return Person + * @return Record */ - public Person getBio(String id) { + public Record getBio(String id) { log.debug("getBio called with ID=" + id); if (!isValid(id)) { return null; } - InputStream bioDocument = orcidRestConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken); - Person person = converter.convertSinglePerson(bioDocument); + InputStream bioDocument = orcidRestConnector.get(id, accessToken); + Record record = converter.convertToRecord(bioDocument); try { bioDocument.close(); } catch (IOException e) { log.error(e.getMessage(), e); } - return person; + return record; } /** @@ -201,13 +253,13 @@ public List searchExternalDataObjects(String query, int star log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken); InputStream bioDocument = orcidRestConnector.get(searchPath, accessToken); List results = converter.convert(bioDocument); - List bios = new LinkedList<>(); + List bios = new LinkedList<>(); for (Result result : results) { OrcidIdentifier orcidIdentifier = result.getOrcidIdentifier(); if (orcidIdentifier != null) { log.debug("Found OrcidId=" + orcidIdentifier.toString()); String orcid = orcidIdentifier.getPath(); - Person bio = getBio(orcid); + Record bio = getBio(orcid); if (bio != null) { bios.add(bio); } @@ -298,4 +350,11 @@ public void setOrcidRestConnector(OrcidRestConnector orcidRestConnector) { this.orcidRestConnector = orcidRestConnector; } + public Map getExternalIdentifiers() { + return externalIdentifiers; + } + + public void setExternalIdentifiers(Map externalIdentifiers) { + this.externalIdentifiers = externalIdentifiers; + } } diff --git a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/XMLtoBio.java b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/XMLtoBio.java index 25b3cf787feb..ff7cedbb47ab 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/XMLtoBio.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/XMLtoBio.java @@ -14,6 +14,7 @@ import org.apache.logging.log4j.Logger; import org.orcid.jaxb.model.v3.release.record.Person; +import org.orcid.jaxb.model.v3.release.record.Record; import org.orcid.jaxb.model.v3.release.search.Result; import org.orcid.jaxb.model.v3.release.search.Search; import org.xml.sax.SAXException; @@ -64,4 +65,15 @@ public Person convertSinglePerson(InputStream xml) { } return null; } + + public Record convertToRecord(InputStream xml) { + Record record = null; + try { + record = (Record) unmarshall(xml, Record.class); + return record; + } catch (SAXException | URISyntaxException e) { + log.error(e); + } + return record; + } } diff --git a/dspace/config/spring/api/external-services.xml b/dspace/config/spring/api/external-services.xml index 43ceb79c4be7..97ca0c45de67 100644 --- a/dspace/config/spring/api/external-services.xml +++ b/dspace/config/spring/api/external-services.xml @@ -64,6 +64,11 @@ Person
    + + + + +
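The new externalIdentifiers map added to OrcidV3AuthorDataProvider above is filled from external-services.xml and used by appendExternalIdentifiers to copy identifier types found on the ORCID record into configurable metadata fields; the concrete <entry> values of that configuration are not shown above. A hypothetical sketch of how such a mapping is consumed, with assumed identifier types and field names (only "Scopus Author ID" and the value 57432999200 appear elsewhere in this series; the metadata field names are guesses):

import java.util.Map;

public class OrcidExternalIdentifierMappingSketch {
    public static void main(String[] args) {
        // Stand-in for the Spring-injected map; the real keys/values live in
        // external-services.xml and are assumptions here.
        Map<String, String> externalIdentifiers = Map.of(
                "Scopus Author ID", "person.identifier.scopus-author-id", // assumed field name
                "ResearcherID", "person.identifier.rid");                 // assumed field name

        // appendExternalIdentifiers() iterates the external identifiers on the
        // ORCID record, looks up the configured metadata field for each type
        // and, when a mapping exists, adds the identifier value to that field.
        String typeFromRecord = "Scopus Author ID";
        String valueFromRecord = "57432999200";
        String metadataField = externalIdentifiers.get(typeFromRecord);
        if (metadataField != null) {
            System.out.println(metadataField + " = " + valueFromRecord);
        }
    }
}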
    From 79f219323576c1123ab34f550f0184bbac9bd6d9 Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 13 Oct 2023 11:14:11 +0300 Subject: [PATCH 417/686] [CST-11738] added researcher id metadata --- dspace/config/spring/api/external-services.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/dspace/config/spring/api/external-services.xml b/dspace/config/spring/api/external-services.xml index 97ca0c45de67..2ca8ebf58565 100644 --- a/dspace/config/spring/api/external-services.xml +++ b/dspace/config/spring/api/external-services.xml @@ -67,6 +67,7 @@ + From 9cd790cbd9901b8b87c2921cfa107efce317122b Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 13 Oct 2023 16:10:54 +0300 Subject: [PATCH 418/686] [CST-11738] fixed broken ITs --- .../org/dspace/authority/orcid/MockOrcid.java | 8 + .../dspace/authority/orcid/orcid-record.xml | 270 ++++++++++++++++++ .../app/rest/OrcidExternalSourcesIT.java | 12 +- .../org/dspace/app/rest/orcid-record.xml | 270 ++++++++++++++++++ 4 files changed, 554 insertions(+), 6 deletions(-) create mode 100644 dspace-api/src/test/resources/org/dspace/authority/orcid/orcid-record.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/orcid-record.xml diff --git a/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java b/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java index 562aa86a585e..88c29fd23344 100644 --- a/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java +++ b/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java @@ -51,6 +51,14 @@ public InputStream answer(InvocationOnMock invocation) { } }); + when(orcidRestConnector.get(ArgumentMatchers.matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), ArgumentMatchers.any())) + .thenAnswer(new Answer() { + @Override + public InputStream answer(InvocationOnMock invocation) { + return this.getClass().getResourceAsStream("orcid-record.xml"); + } + }); + setOrcidRestConnector(orcidRestConnector); } diff --git a/dspace-api/src/test/resources/org/dspace/authority/orcid/orcid-record.xml b/dspace-api/src/test/resources/org/dspace/authority/orcid/orcid-record.xml new file mode 100644 index 000000000000..7672e980c8bd --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/authority/orcid/orcid-record.xml @@ -0,0 +1,270 @@ + + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + + en + + + Direct + 2023-09-19T12:25:43.445Z + 2023-10-12T14:19:06.983Z + true + true + true + + + 2023-10-12T13:28:14.550Z + + 2023-09-19T12:25:43.736Z + 2023-09-19T12:25:43.736Z + Andrea + Bollini + + + 2023-10-12T13:28:14.550Z + + 2023-10-05T07:56:29.001Z + 2023-10-12T13:28:14.550Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + isco + + + 2023-10-12T13:28:14.541Z + 2023-10-12T13:28:14.541Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + Bollini, Andrea + + + + 2023-10-12T13:27:57.187Z + + 2023-10-12T10:35:14.406Z + 2023-10-12T13:27:57.187Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + Linkedin + https://it.linkedin.com/in/andreabollini + + + 2023-10-12T13:27:57.183Z + 2023-10-12T13:27:57.183Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + 4science + https://www.4science.it/ + + + + 2023-10-12T10:38:48.105Z + + 2023-10-12T10:33:21.077Z + 2023-10-12T10:38:48.105Z 
+ + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + test-orcid@mailinator.com + + + + + + 2023-09-04T09:04:52.121Z + + 2023-01-13T11:20:13.803Z + 2023-01-13T11:48:02.979Z + + + https://sandbox.orcid.org/client/0000-0002-3609-4817 + 0000-0002-3609-4817 + sandbox.orcid.org + + Scopus Wizard + + Scopus Author ID + 57432999200 + http://www.scopus.com/inward/authorDetails.url?authorID=57432999200&partnerID=MN8TOARS + self + + + 2023-01-19T14:25:14.512Z + 2023-01-19T14:25:14.512Z + + + https://sandbox.orcid.org/client/0000-0002-3609-4817 + 0000-0002-3609-4817 + sandbox.orcid.org + + Scopus Wizard + + Scopus Author ID + 35233141600 + http://www.scopus.com/inward/authorDetails.url?authorID=35233141600&partnerID=MN8TOARS + self + + + + + 2023-10-12T14:19:06.992Z + + + + 2023-10-12T10:52:26.965Z + + 2023-10-12T10:52:26.965Z + + + 2023-10-12T10:52:26.965Z + 2023-10-12T10:52:26.965Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + Dspace + + Milan + IT + + + + + + 2023-10-12T10:35:49.079Z + + + 2023-10-12T10:34:17.514Z + 2023-10-12T10:35:49.079Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + 4Science + + Milan + IT + + + https://ror.org/03vb2cr34 + ROR + + + + + + + + + + + + + + 2023-10-12T14:19:06.992Z + + 2023-10-12T14:19:06.992Z + + + doi + 10.1016/j.procs.2014.06.008 + 10.1016/j.procs.2014.06.008 + https://doi.org/10.1016/j.procs.2014.06.008 + self + + + eid + 55484808800 + 55484808800 + self + + + + 2023-10-12T14:09:25.415Z + 2023-10-12T14:19:06.992Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + Publication Metadata in CERIF: Inspiration by FRBR + + + + doi + 10.1016/j.procs.2014.06.008 + 10.1016/j.procs.2014.06.008 + https://doi.org/10.1016/j.procs.2014.06.008 + self + + + issn + 1877-0509 + 1877-0509 + https://portal.issn.org/resource/ISSN/1877-0509 + part-of + + + eid + 55484808800 + 55484808800 + self + + + http://dx.doi.org/10.1016/j.procs.2014.06.008 + journal-article + + 2014 + + Procedia Computer Science + + + + + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidExternalSourcesIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidExternalSourcesIT.java index 04592c17da2d..2e797f9ce4ab 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidExternalSourcesIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidExternalSourcesIT.java @@ -150,10 +150,10 @@ public void findOneExternalSourcesMockitoTest() throws Exception { OrcidRestConnector orcidConnector = Mockito.mock(OrcidRestConnector.class); OrcidRestConnector realConnector = orcidV3AuthorDataProvider.getOrcidRestConnector(); orcidV3AuthorDataProvider.setOrcidRestConnector(orcidConnector); - when(orcidConnector.get(ArgumentMatchers.endsWith("/person"), ArgumentMatchers.any())) + when(orcidConnector.get(ArgumentMatchers.matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), ArgumentMatchers.any())) .thenAnswer(new Answer() { public InputStream answer(InvocationOnMock invocation) { - return getClass().getResourceAsStream("orcid-person-record.xml"); + return getClass().getResourceAsStream("orcid-record.xml"); } }); @@ -193,10 +193,10 @@ public InputStream answer(InvocationOnMock invocation) { return getClass().getResourceAsStream("orcid-search.xml"); } }); - 
when(orcidConnector.get(ArgumentMatchers.endsWith("/person"), ArgumentMatchers.any())) + when(orcidConnector.get(ArgumentMatchers.matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), ArgumentMatchers.any())) .thenAnswer(new Answer() { public InputStream answer(InvocationOnMock invocation) { - return getClass().getResourceAsStream("orcid-person-record.xml"); + return getClass().getResourceAsStream("orcid-record.xml"); } }); String q = "orcid:0000-0002-9029-1854"; @@ -246,10 +246,10 @@ public InputStream answer(InvocationOnMock invocation) { return getClass().getResourceAsStream("orcid-search.xml"); } }); - when(orcidConnector.get(ArgumentMatchers.endsWith("/person"), ArgumentMatchers.any())) + when(orcidConnector.get(ArgumentMatchers.matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), ArgumentMatchers.any())) .thenAnswer(new Answer() { public InputStream answer(InvocationOnMock invocation) { - return getClass().getResourceAsStream("orcid-person-record.xml"); + return getClass().getResourceAsStream("orcid-record.xml"); } }); String q = "family-name:bollini AND given-names:andrea"; diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/orcid-record.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/orcid-record.xml new file mode 100644 index 000000000000..7672e980c8bd --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/orcid-record.xml @@ -0,0 +1,270 @@ + + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + + en + + + Direct + 2023-09-19T12:25:43.445Z + 2023-10-12T14:19:06.983Z + true + true + true + + + 2023-10-12T13:28:14.550Z + + 2023-09-19T12:25:43.736Z + 2023-09-19T12:25:43.736Z + Andrea + Bollini + + + 2023-10-12T13:28:14.550Z + + 2023-10-05T07:56:29.001Z + 2023-10-12T13:28:14.550Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + isco + + + 2023-10-12T13:28:14.541Z + 2023-10-12T13:28:14.541Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + Bollini, Andrea + + + + 2023-10-12T13:27:57.187Z + + 2023-10-12T10:35:14.406Z + 2023-10-12T13:27:57.187Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + Linkedin + https://it.linkedin.com/in/andreabollini + + + 2023-10-12T13:27:57.183Z + 2023-10-12T13:27:57.183Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + 4science + https://www.4science.it/ + + + + 2023-10-12T10:38:48.105Z + + 2023-10-12T10:33:21.077Z + 2023-10-12T10:38:48.105Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + test-orcid@mailinator.com + + + + + + 2023-09-04T09:04:52.121Z + + 2023-01-13T11:20:13.803Z + 2023-01-13T11:48:02.979Z + + + https://sandbox.orcid.org/client/0000-0002-3609-4817 + 0000-0002-3609-4817 + sandbox.orcid.org + + Scopus Wizard + + Scopus Author ID + 57432999200 + http://www.scopus.com/inward/authorDetails.url?authorID=57432999200&partnerID=MN8TOARS + self + + + 2023-01-19T14:25:14.512Z + 2023-01-19T14:25:14.512Z + + + https://sandbox.orcid.org/client/0000-0002-3609-4817 + 0000-0002-3609-4817 + sandbox.orcid.org + + Scopus Wizard + + Scopus Author ID + 35233141600 + http://www.scopus.com/inward/authorDetails.url?authorID=35233141600&partnerID=MN8TOARS + self + + + + + 2023-10-12T14:19:06.992Z + + + + 2023-10-12T10:52:26.965Z + + 
2023-10-12T10:52:26.965Z + + + 2023-10-12T10:52:26.965Z + 2023-10-12T10:52:26.965Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + Dspace + + Milan + IT + + + + + + 2023-10-12T10:35:49.079Z + + + 2023-10-12T10:34:17.514Z + 2023-10-12T10:35:49.079Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + 4Science + + Milan + IT + + + https://ror.org/03vb2cr34 + ROR + + + + + + + + + + + + + + 2023-10-12T14:19:06.992Z + + 2023-10-12T14:19:06.992Z + + + doi + 10.1016/j.procs.2014.06.008 + 10.1016/j.procs.2014.06.008 + https://doi.org/10.1016/j.procs.2014.06.008 + self + + + eid + 55484808800 + 55484808800 + self + + + + 2023-10-12T14:09:25.415Z + 2023-10-12T14:19:06.992Z + + + https://sandbox.orcid.org/0000-0002-9029-1854 + 0000-0002-9029-1854 + sandbox.orcid.org + + Andrea Bollini + + + Publication Metadata in CERIF: Inspiration by FRBR + + + + doi + 10.1016/j.procs.2014.06.008 + 10.1016/j.procs.2014.06.008 + https://doi.org/10.1016/j.procs.2014.06.008 + self + + + issn + 1877-0509 + 1877-0509 + https://portal.issn.org/resource/ISSN/1877-0509 + part-of + + + eid + 55484808800 + 55484808800 + self + + + http://dx.doi.org/10.1016/j.procs.2014.06.008 + journal-article + + 2014 + + Procedia Computer Science + + + + + \ No newline at end of file From 167f0ae5d68f1007adb5c04f1e193bd8828055a8 Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 13 Oct 2023 19:31:45 +0300 Subject: [PATCH 419/686] [CST-11738] fixed broken ITs testOrcidImportFiller --- .../src/test/java/org/dspace/authority/CrisConsumerIT.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java b/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java index 0cceb70bb218..3cbbe6850dc9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java @@ -22,6 +22,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.matches; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; @@ -92,7 +93,7 @@ public class CrisConsumerIT extends AbstractControllerIntegrationTest { @Value("classpath:org/dspace/app/rest/simple-article.pdf") private Resource simpleArticle; - @Value("classpath:org/dspace/authority/orcid/orcid-person-record.xml") + @Value("classpath:org/dspace/authority/orcid/orcid-record.xml") private Resource orcidPersonRecord; private EPerson submitter; @@ -1058,7 +1059,7 @@ public void testOrcidImportFiller() throws Exception { String orcid = "0000-0002-9029-1854"; - when(mockOrcidConnector.get(eq(orcid + "/person"), any())) + when(mockOrcidConnector.get(matches("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$"), any())) .thenAnswer(i -> orcidPersonRecord.getInputStream()); try { @@ -1076,7 +1077,7 @@ public void testOrcidImportFiller() throws Exception { context.restoreAuthSystemState(); - verify(mockOrcidConnector).get(eq(orcid + "/person"), any()); + verify(mockOrcidConnector).get(eq(orcid), any()); verifyNoMoreInteractions(mockOrcidConnector); String authToken = getAuthToken(submitter.getEmail(), password); From 0ed1f52c6d5e660dd2b19525e9c0a9adaee6d2eb Mon Sep 17 00:00:00 2001 From: 
eskander Date: Tue, 17 Oct 2023 11:11:41 +0300 Subject: [PATCH 420/686] [DSC-1247] fixed broken ITs --- .../app/bulkimport/service/BulkImportWorkbookBuilderIT.java | 3 ++- .../integration/crosswalks/XlsCollectionCrosswalkIT.java | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java index eed2826ea67a..a76642790704 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java @@ -176,7 +176,8 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { String tempLocation = storeInTempLocation(workbook); - String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", tempLocation }; + String[] args = new String[] { "bulk-import", "-c", publications.getID().toString(), "-f", tempLocation, + "-e", admin.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, eperson); diff --git a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/XlsCollectionCrosswalkIT.java b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/XlsCollectionCrosswalkIT.java index 8929e4d65116..6ed7d8ba3aa7 100644 --- a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/XlsCollectionCrosswalkIT.java +++ b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/XlsCollectionCrosswalkIT.java @@ -154,7 +154,7 @@ public void testBulkImportOfCollectionDisseminate() throws Exception { } String[] args = new String[] { "bulk-import", "-c", collection.getID().toString(), - "-f", tempWorkbookFile.getAbsolutePath() }; + "-f", tempWorkbookFile.getAbsolutePath(), "-e", admin.getEmail()}; TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); From 668b5b24dfad097eb8a7ee584992358269043fa3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ad=C3=A1n=20Rom=C3=A1n=20Ruiz?= Date: Fri, 6 Oct 2023 10:04:41 +0200 Subject: [PATCH 421/686] #8585 Add submitter information to provenance metadata (cherry picked from commit c15ac0eb4a3d39a0de47adbfa5260a6f3b396837) --- .../xmlworkflow/XmlWorkflowServiceImpl.java | 25 ++++++++++++------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java index da7910da29f2..51292fd4773a 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java @@ -221,6 +221,8 @@ public XmlWorkflowItem start(Context context, WorkspaceItem wsi) //Get our next step, if none is found, archive our item firstStep = wf.getNextStep(context, wfi, firstStep, ActionResult.OUTCOME_COMPLETE); if (firstStep == null) { + // record the submitted provenance message + recordStart(context, wfi.getItem(),null); archive(context, wfi); } else { activateFirstStep(context, wf, firstStep, wfi); @@ -334,7 +336,7 @@ protected void activateFirstStep(Context context, Workflow wf, Step firstStep, X + "item_id=" + wfi.getItem().getID() + "collection_id=" + wfi.getCollection().getID())); - // record the start of the workflow w/provenance message +// record the 
start of the workflow w/provenance message recordStart(context, wfi.getItem(), firstActionConfig.getProcessingAction()); //Fire an event ! @@ -1187,25 +1189,30 @@ protected void recordStart(Context context, Item myitem, Action action) DCDate now = DCDate.getCurrent(); // Create provenance description - String provmessage = ""; + StringBuffer provmessage = new StringBuffer(); if (myitem.getSubmitter() != null) { - provmessage = "Submitted by " + myitem.getSubmitter().getFullName() - + " (" + myitem.getSubmitter().getEmail() + ") on " - + now.toString() + " workflow start=" + action.getProvenanceStartId() + "\n"; + provmessage.append("Submitted by ").append(myitem.getSubmitter().getFullName()) + .append(" (").append(myitem.getSubmitter().getEmail()).append(") on ") + .append(now.toString()); } else { // else, null submitter - provmessage = "Submitted by unknown (probably automated) on" - + now.toString() + " workflow start=" + action.getProvenanceStartId() + "\n"; + provmessage.append("Submitted by unknown (probably automated) on") + .append(now.toString()); + } + if (action != null) { + provmessage.append(" workflow start=").append(action.getProvenanceStartId()).append("\n"); + } else { + provmessage.append("\n"); } // add sizes and checksums of bitstreams - provmessage += installItemService.getBitstreamProvenanceMessage(context, myitem); + provmessage.append(installItemService.getBitstreamProvenanceMessage(context, myitem)); // Add message to the DC itemService .addMetadata(context, myitem, MetadataSchemaEnum.DC.getName(), - "description", "provenance", "en", provmessage); + "description", "provenance", "en", provmessage.toString()); itemService.update(context, myitem); } From 0ddebd926f055a0aefcc0b64b5d9e5f1442c2c3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ad=C3=A1n=20Rom=C3=A1n=20Ruiz?= Date: Mon, 16 Oct 2023 09:33:54 +0200 Subject: [PATCH 422/686] 8585 - added provenance to metadata-import and itemImport (cherry picked from commit ea6307dcc68a75c935049a02022145691693cff4) --- .../dspace/app/bulkedit/MetadataImport.java | 4 ++++ .../app/itemimport/ItemImportServiceImpl.java | 4 ++++ .../content/InstallItemServiceImpl.java | 24 +++++++++++++++++++ .../content/service/InstallItemService.java | 11 +++++++++ 4 files changed, 43 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java index 9044c723ff53..af6976acb14a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java @@ -578,6 +578,10 @@ public List runImport(Context c, boolean change, wfItem = workflowService.startWithoutNotify(c, wsItem); } } else { + // Add provenance info + String provenance = installItemService.getSubmittedByProvenanceMessage(c, wsItem.getItem()); + itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(), + "description", "provenance", "en", provenance); // Install the item installItemService.installItem(c, wsItem); } diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java index 4148232cf3ba..255f4bdcbb15 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java @@ -774,6 +774,10 @@ protected Item addItem(Context c, List mycollections, String path, // put item in system if 
(!isTest) { try { + // Add provenance info + String provenance = installItemService.getSubmittedByProvenanceMessage(c, wi.getItem()); + itemService.addMetadata(c, wi.getItem(), MetadataSchemaEnum.DC.getName(), + "description", "provenance", "en", provenance); installItemService.installItem(c, wi, myhandle); } catch (Exception e) { workspaceItemService.deleteAll(c, wi); diff --git a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java index 32c5b92c605b..e3e2025b6188 100644 --- a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java @@ -271,4 +271,28 @@ public String getBitstreamProvenanceMessage(Context context, Item myitem) return myMessage.toString(); } + + @Override + public String getSubmittedByProvenanceMessage(Context context, Item item) throws SQLException { + // get date + DCDate now = DCDate.getCurrent(); + + // Create provenance description + StringBuffer provmessage = new StringBuffer(); + + if (item.getSubmitter() != null) { + provmessage.append("Submitted by ").append(item.getSubmitter().getFullName()) + .append(" (").append(item.getSubmitter().getEmail()).append(") on ") + .append(now.toString()); + } else { + // else, null submitter + provmessage.append("Submitted by unknown (probably automated) on") + .append(now.toString()); + } + provmessage.append("\n"); + + // add sizes and checksums of bitstreams + provmessage.append(getBitstreamProvenanceMessage(context, item)); + return provmessage.toString(); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java b/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java index 67ac2e20499c..d00c62cc91d8 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java @@ -83,4 +83,15 @@ public Item restoreItem(Context c, InProgressSubmission is, public String getBitstreamProvenanceMessage(Context context, Item myitem) throws SQLException; + /** + * Generate provenance description of direct item submission (not through workflow). 
+ * + * @param context context + * @param item the item to generate description for + * @return provenance description + * @throws SQLException if database error + */ + public String getSubmittedByProvenanceMessage(Context context, Item item) + throws SQLException;; + } From c032f3a041c6e1f0607822efce33dbb782210b1a Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Tue, 17 Oct 2023 11:34:04 +0300 Subject: [PATCH 423/686] dspace-api: fix misaligned comment (cherry picked from commit 4fba787322803cc36ef267f0d6913b92c1eaeca4) --- .../java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java index 51292fd4773a..bc91a1fd9298 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java @@ -336,7 +336,7 @@ protected void activateFirstStep(Context context, Workflow wf, Step firstStep, X + "item_id=" + wfi.getItem().getID() + "collection_id=" + wfi.getCollection().getID())); -// record the start of the workflow w/provenance message + // record the start of the workflow w/provenance message recordStart(context, wfi.getItem(), firstActionConfig.getProcessingAction()); //Fire an event ! From be60d936d8c05a8f1e11a9d9551aa93bbd1b9272 Mon Sep 17 00:00:00 2001 From: frabacche Date: Tue, 17 Oct 2023 17:45:30 +0200 Subject: [PATCH 424/686] DSC-1304 expose dspace and cris versions in two different attributes --- .../dspace/app/rest/converter/RootConverter.java | 3 ++- .../java/org/dspace/app/rest/model/RootRest.java | 13 ++++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java index 61f18a5b3c9c..94835b969689 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java @@ -28,7 +28,8 @@ public RootRest convert() { rootRest.setDspaceName(configurationService.getProperty("dspace.name")); rootRest.setDspaceUI(configurationService.getProperty("dspace.ui.url")); rootRest.setDspaceServer(configurationService.getProperty("dspace.server.url")); - rootRest.setDspaceVersion("DSpace " + getSourceVersion()); + rootRest.setDspaceVersion(configurationService.getProperty("dspace.version")); + rootRest.setCrisVersion("DSpace " + getSourceVersion()); return rootRest; } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java index cef8965601ca..a538a89b9670 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java @@ -21,8 +21,9 @@ public class RootRest extends RestAddressableModel { private String dspaceName; private String dspaceServer; private String dspaceVersion; + private String crisVersion; - public String getCategory() { + public String getCategory() { return CATEGORY; } @@ -67,6 +68,14 @@ public void setDspaceVersion(String dspaceVersion) { this.dspaceVersion = dspaceVersion; } + public String getCrisVersion() { + return crisVersion; + } + + public void 
setCrisVersion(String crisVersion) { + this.crisVersion = crisVersion; + } + @Override public boolean equals(Object object) { return (object instanceof RootRest && @@ -76,6 +85,7 @@ public boolean equals(Object object) { .append(this.getDspaceUI(), ((RootRest) object).getDspaceUI()) .append(this.getDspaceName(), ((RootRest) object).getDspaceName()) .append(this.getDspaceServer(), ((RootRest) object).getDspaceServer()) + .append(this.getCrisVersion(), ((RootRest)object).getCrisVersion()) .isEquals()); } @@ -88,6 +98,7 @@ public int hashCode() { .append(this.getDspaceName()) .append(this.getDspaceUI()) .append(this.getDspaceServer()) + .append(this.getCrisVersion()) .toHashCode(); } } From b5c979a257cb967077251cbabbf1a63e021507a0 Mon Sep 17 00:00:00 2001 From: frabacche Date: Wed, 18 Oct 2023 08:35:53 +0200 Subject: [PATCH 425/686] DSC-1304 DSpaceVersion stored into a Java constants --- dspace-api/src/main/java/org/dspace/core/CrisConstants.java | 2 +- .../main/java/org/dspace/app/rest/converter/RootConverter.java | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/CrisConstants.java b/dspace-api/src/main/java/org/dspace/core/CrisConstants.java index 18def2d10316..eb503b9aff8e 100644 --- a/dspace-api/src/main/java/org/dspace/core/CrisConstants.java +++ b/dspace-api/src/main/java/org/dspace/core/CrisConstants.java @@ -21,7 +21,7 @@ public class CrisConstants { * same number than the parent leading metadata */ public static final String PLACEHOLDER_PARENT_METADATA_VALUE = "#PLACEHOLDER_PARENT_METADATA_VALUE#"; - + public static final String DSPACE_BASE_VERSION = "7.5"; public static final MetadataFieldName MD_ENTITY_TYPE = new MetadataFieldName("dspace", "entity", "type"); public static final MetadataFieldName MD_SUBMISSION_TYPE = new MetadataFieldName("cris", "submission", "definition"); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java index 94835b969689..a6aebb0bb9a2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java @@ -10,6 +10,7 @@ import static org.dspace.app.util.Util.getSourceVersion; import org.dspace.app.rest.model.RootRest; +import org.dspace.core.CrisConstants; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -28,7 +29,7 @@ public RootRest convert() { rootRest.setDspaceName(configurationService.getProperty("dspace.name")); rootRest.setDspaceUI(configurationService.getProperty("dspace.ui.url")); rootRest.setDspaceServer(configurationService.getProperty("dspace.server.url")); - rootRest.setDspaceVersion(configurationService.getProperty("dspace.version")); + rootRest.setDspaceVersion(CrisConstants.DSPACE_BASE_VERSION); rootRest.setCrisVersion("DSpace " + getSourceVersion()); return rootRest; } From b6598bd5b4924e6be71ddb3f4a2a6d940e5d7450 Mon Sep 17 00:00:00 2001 From: frabacche Date: Wed, 18 Oct 2023 08:44:32 +0200 Subject: [PATCH 426/686] DSC-1304 DSpaceVersion stored into a Java constants --- dspace-api/src/main/java/org/dspace/core/CrisConstants.java | 2 +- .../main/java/org/dspace/app/rest/converter/RootConverter.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/CrisConstants.java 
b/dspace-api/src/main/java/org/dspace/core/CrisConstants.java index eb503b9aff8e..5bce199cfd76 100644 --- a/dspace-api/src/main/java/org/dspace/core/CrisConstants.java +++ b/dspace-api/src/main/java/org/dspace/core/CrisConstants.java @@ -21,7 +21,7 @@ public class CrisConstants { * same number than the parent leading metadata */ public static final String PLACEHOLDER_PARENT_METADATA_VALUE = "#PLACEHOLDER_PARENT_METADATA_VALUE#"; - public static final String DSPACE_BASE_VERSION = "7.5"; + public static final String DSPACE_BASE_VERSION = "DSpace 7.5"; public static final MetadataFieldName MD_ENTITY_TYPE = new MetadataFieldName("dspace", "entity", "type"); public static final MetadataFieldName MD_SUBMISSION_TYPE = new MetadataFieldName("cris", "submission", "definition"); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java index a6aebb0bb9a2..b6d9499d5f77 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java @@ -30,7 +30,7 @@ public RootRest convert() { rootRest.setDspaceUI(configurationService.getProperty("dspace.ui.url")); rootRest.setDspaceServer(configurationService.getProperty("dspace.server.url")); rootRest.setDspaceVersion(CrisConstants.DSPACE_BASE_VERSION); - rootRest.setCrisVersion("DSpace " + getSourceVersion()); + rootRest.setCrisVersion(getSourceVersion()); return rootRest; } } From 051e17e6a1b436f7d793e311acc0f7a1d82a41f7 Mon Sep 17 00:00:00 2001 From: frabacche Date: Wed, 18 Oct 2023 11:55:30 +0200 Subject: [PATCH 427/686] DSC-1304 test fix --- .../java/org/dspace/app/rest/converter/RootConverterTest.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/converter/RootConverterTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/converter/RootConverterTest.java index acb3b0c263d0..73b12848e790 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/converter/RootConverterTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/converter/RootConverterTest.java @@ -13,6 +13,7 @@ import org.dspace.app.rest.model.RootRest; import org.dspace.app.util.Util; +import org.dspace.core.CrisConstants; import org.dspace.services.ConfigurationService; import org.junit.Before; import org.junit.Test; @@ -52,7 +53,8 @@ public void testCorrectPropertiesSetFromConfigurationService() throws Exception assertEquals("dspaceurl", rootRest.getDspaceUI()); assertEquals("dspacename", rootRest.getDspaceName()); assertEquals(restUrl, rootRest.getDspaceServer()); - assertEquals("DSpace " + Util.getSourceVersion(), rootRest.getDspaceVersion()); + assertEquals(CrisConstants.DSPACE_BASE_VERSION, rootRest.getDspaceVersion()); + assertEquals(Util.getSourceVersion(), rootRest.getCrisVersion()); } @Test From a5c859ed9e70b4d04c3303a0b8494f5e174270bc Mon Sep 17 00:00:00 2001 From: frabacche Date: Wed, 18 Oct 2023 14:44:07 +0200 Subject: [PATCH 428/686] DSC-1304 checkstyle! 
--- .../org/dspace/app/rest/converter/RootConverter.java | 2 +- .../java/org/dspace/app/rest/model/RootRest.java | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java index b6d9499d5f77..1d81e308e39f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RootConverter.java @@ -30,7 +30,7 @@ public RootRest convert() { rootRest.setDspaceUI(configurationService.getProperty("dspace.ui.url")); rootRest.setDspaceServer(configurationService.getProperty("dspace.server.url")); rootRest.setDspaceVersion(CrisConstants.DSPACE_BASE_VERSION); - rootRest.setCrisVersion(getSourceVersion()); + rootRest.setCrisVersion(getSourceVersion()); return rootRest; } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java index a538a89b9670..9fd6a1263423 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RootRest.java @@ -23,7 +23,7 @@ public class RootRest extends RestAddressableModel { private String dspaceVersion; private String crisVersion; - public String getCategory() { + public String getCategory() { return CATEGORY; } @@ -69,12 +69,12 @@ public void setDspaceVersion(String dspaceVersion) { } public String getCrisVersion() { - return crisVersion; - } + return crisVersion; + } - public void setCrisVersion(String crisVersion) { - this.crisVersion = crisVersion; - } + public void setCrisVersion(String crisVersion) { + this.crisVersion = crisVersion; + } @Override public boolean equals(Object object) { From c35dc5babb06f4af6be0b66962a2517f6ffd9082 Mon Sep 17 00:00:00 2001 From: eskander Date: Thu, 19 Oct 2023 14:15:56 +0300 Subject: [PATCH 429/686] [CST-7695] detect the duplication and return list of matched objects into ExternalDataObject --- .../service/impl/SolrDedupServiceImpl.java | 16 +++ .../utils/MD5ValueSignature.java | 100 ++++++++++++++++++ .../app/deduplication/utils/Signature.java | 5 + .../external/model/ExternalDataObject.java | 15 +++ .../service/impl/ExternalDataServiceImpl.java | 64 ++++++++++- .../ExternalSourceEntryRestConverter.java | 37 +++++++ .../rest/model/ExternalSourceEntryRest.java | 11 ++ .../rest/ExternalSourcesRestControllerIT.java | 59 +++++++++++ .../provider/impl/MockDataProvider.java | 1 + 9 files changed, 307 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java index 6f719ff85f2c..cc5c0f2bc861 100644 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java @@ -314,6 +314,22 @@ private void fillSignature(Context ctx, DSpaceObject iu, Map plainSignatures = algo.getPlainSignature(iu, ctx); + for (String signature : plainSignatures) { + if (StringUtils.isNotEmpty(signature)) { + String key = "plain_" + algo.getSignatureType() + "_signature"; + if (tmpMapFilter.containsKey(key)) { + List obj = tmpMapFilter.get(key); + obj.add(signature); + 
tmpMapFilter.put(key, obj); + } else { + List obj = new ArrayList(); + obj.add(signature); + tmpMapFilter.put(key, obj); + } + } + } } } diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/MD5ValueSignature.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/MD5ValueSignature.java index 8b047584bc1b..aacc7aa9ae0e 100644 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/MD5ValueSignature.java +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/MD5ValueSignature.java @@ -6,6 +6,7 @@ * http://www.dspace.org/license/ */ package org.dspace.app.deduplication.utils; + import java.io.UnsupportedEncodingException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; @@ -13,6 +14,7 @@ import java.util.List; import java.util.Locale; import java.util.Objects; +import java.util.stream.Collectors; import com.ibm.icu.text.CharsetDetector; import com.ibm.icu.text.CharsetMatch; @@ -22,12 +24,15 @@ import org.apache.logging.log4j.Logger; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; import org.dspace.content.MetadataValue; import org.dspace.content.WorkspaceItem; +import org.dspace.content.dto.MetadataValueDTO; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Context; +import org.dspace.external.model.ExternalDataObject; import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowItemService; import org.dspace.workflow.factory.WorkflowServiceFactory; @@ -95,6 +100,37 @@ public List getSignature(DSpaceObject item, Context context) { } } + public List getPlainSignature(DSpaceObject item, Context context) { + List result = new ArrayList(); + try { + MessageDigest digester = MessageDigest.getInstance("MD5"); + List values = getMultiValue(item, metadata); + if (values != null) { + for (String value : values) { + if (StringUtils.isNotEmpty(value)) { + String valueNorm = normalize(item, value); + digester.update(valueNorm.getBytes("UTF-8")); + byte[] signature = digester.digest(); + char[] arr = new char[signature.length << 1]; + for (int i = 0; i < signature.length; i++) { + int b = signature[i]; + int idx = i << 1; + arr[idx] = HEX_DIGITS[(b >> 4) & 0xf]; + arr[idx + 1] = HEX_DIGITS[b & 0xf]; + } + String sigString = new String(arr); + result.add(sigString); + } + } + } + return result; + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e.getMessage(), e); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + protected String normalize(DSpaceObject item, Context context, String value) { if (value != null) { String temp = StringUtils.EMPTY; @@ -210,6 +246,70 @@ protected List getMultiValue(DSpaceObject item, String metadata) { return retValue; } + public List getSignature(ExternalDataObject object) { + List result = new ArrayList(); + try { + MessageDigest digester = MessageDigest.getInstance("MD5"); + List values = getMultiValue(object, metadata); + if (values != null) { + for (String value : values) { + if (StringUtils.isNotEmpty(value)) { + String valueNorm = normalize(object, value); + digester.update(valueNorm.getBytes("UTF-8")); + byte[] signature = digester.digest(); + char[] arr = new char[signature.length << 1]; + for (int i = 0; i < signature.length; i++) { + int b = signature[i]; + int idx = i << 1; + arr[idx] = 
HEX_DIGITS[(b >> 4) & 0xf]; + arr[idx + 1] = HEX_DIGITS[b & 0xf]; + } + String sigString = new String(arr); + result.add(sigString); + } + } + } + return result; + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e.getMessage(), e); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + protected List getMultiValue(ExternalDataObject object, String metadata) { + return object.getMetadata() + .stream() + .filter(metadataValueDTO -> + new MetadataFieldName(metadataValueDTO.getSchema(), metadataValueDTO.getElement(), + metadataValueDTO.getQualifier()).toString().equals(metadata)) + .map(MetadataValueDTO::getValue) + .collect(Collectors.toList()); + } + + protected String normalize(ExternalDataObject object, String value) { + String result = value; + if (StringUtils.isEmpty(value)) { + if (StringUtils.isNotEmpty(prefix)) { + result = prefix + object.getId(); + } else { + result = "entity:" + object.getId(); + } + } else { + for (String prefix : ignorePrefix) { + if (value.startsWith(prefix)) { + result = value.substring(prefix.length()); + break; + } + } + if (StringUtils.isNotEmpty(prefix)) { + result = prefix + result; + } + } + + return result; + } + public String getMetadata() { return metadata; } diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/Signature.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/Signature.java index 2bf662b39d75..81a0fb228911 100644 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/Signature.java +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/Signature.java @@ -11,10 +11,15 @@ import org.dspace.content.DSpaceObject; import org.dspace.core.Context; +import org.dspace.external.model.ExternalDataObject; public interface Signature { public List getSignature(/* BrowsableDSpaceObject */DSpaceObject item, Context context); + public List getPlainSignature(DSpaceObject item, Context context); + + public List getSignature(ExternalDataObject object); + public int getResourceTypeID(); public String getSignatureType(); diff --git a/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java b/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java index eac9921df6cc..44ad6a70953e 100644 --- a/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java +++ b/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java @@ -9,6 +9,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.UUID; import org.dspace.content.dto.MetadataValueDTO; @@ -38,6 +39,8 @@ public class ExternalDataObject { */ private String displayValue; + private List matchUUIDs; + /** * Default constructor */ @@ -143,4 +146,16 @@ public String getValue() { public void setValue(String value) { this.value = value; } + + public List getMatchUUIDs() { + return matchUUIDs; + } + + public void setMatchUUIDs(List matchUUIDs) { + this.matchUUIDs = matchUUIDs; + } + + public boolean isDuplicated() { + return !matchUUIDs.isEmpty(); + } } diff --git a/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java b/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java index 7804dfa5689f..76e4fff4f527 100644 --- a/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java @@ -7,12 +7,24 @@ */ package 
org.dspace.external.service.impl; +import static org.dspace.app.deduplication.service.impl.SolrDedupServiceImpl.RESOURCE_FLAG_FIELD; +import static org.dspace.app.deduplication.service.impl.SolrDedupServiceImpl.RESOURCE_IDS_FIELD; +import static org.dspace.app.deduplication.service.impl.SolrDedupServiceImpl.RESOURCE_SIGNATURE_FIELD; + import java.sql.SQLException; +import java.util.ArrayList; import java.util.List; import java.util.Optional; +import java.util.UUID; import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.dspace.app.deduplication.service.DedupService; +import org.dspace.app.deduplication.service.impl.SolrDedupServiceImpl; +import org.dspace.app.deduplication.utils.Signature; import org.dspace.app.suggestion.SuggestionProvider; import org.dspace.app.suggestion.SuggestionService; import org.dspace.authorize.AuthorizeException; @@ -22,11 +34,14 @@ import org.dspace.content.dto.MetadataValueDTO; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; +import org.dspace.discovery.SearchServiceException; import org.dspace.external.model.ExternalDataObject; import org.dspace.external.provider.ExternalDataProvider; import org.dspace.external.service.ExternalDataService; +import org.dspace.utils.DSpace; import org.springframework.beans.factory.annotation.Autowired; /** @@ -49,6 +64,9 @@ public class ExternalDataServiceImpl implements ExternalDataService { @Autowired private SuggestionService suggestionService; + @Autowired + private DedupService dedupService; + @Override public Optional getExternalDataObject(String source, String id) { ExternalDataProvider provider = getExternalDataProvider(source); @@ -64,9 +82,53 @@ public List searchExternalDataObjects(String source, String if (provider == null) { throw new IllegalArgumentException("Provider for: " + source + " couldn't be found"); } - return provider.searchExternalDataObjects(query, start, limit); + + List externalDataObjects = provider.searchExternalDataObjects(query, start, limit); + appendMatchedUUIDs(externalDataObjects); + + return externalDataObjects; + } + + private void appendMatchedUUIDs(List externalDataObjects) { + for (ExternalDataObject externalDataObject : externalDataObjects) { + List uuids = new ArrayList<>(); + try { + QueryResponse response = dedupService.find("*:*", buildFilters(externalDataObject)); + for (SolrDocument resultDoc : response.getResults()) { + uuids.addAll(resultDoc.getFieldValues(RESOURCE_IDS_FIELD) + .stream() + .map(id -> + UUID.fromString(String.valueOf(id))) + .collect(Collectors.toList())); + } + externalDataObject.setMatchUUIDs(uuids); + } catch (SearchServiceException e) { + throw new RuntimeException(e); + } + } + } + + private String[] buildFilters(ExternalDataObject object) { + List filters = new ArrayList<>(); + List allSignatures = getAllSignatures(object); + + if (!allSignatures.isEmpty()) { + filters.add(RESOURCE_FLAG_FIELD + ":" + SolrDedupServiceImpl.DeduplicationFlag.FAKE.getDescription()); + filters.add(RESOURCE_SIGNATURE_FIELD + ":(" + + StringUtils.joinWith(" OR ", allSignatures.stream().toArray(String[]::new)) + ")"); + } + + return filters.toArray(new String[filters.size()]); } + private List getAllSignatures(ExternalDataObject iu) { + List 
signAlgo = new DSpace().getServiceManager().getServicesByType(Signature.class); + return signAlgo.stream() + .filter(algo -> Constants.ITEM == algo.getResourceTypeID()) + .flatMap(algo -> algo.getSignature(iu).stream()) + .filter(signature -> StringUtils.isNotEmpty(signature)) + .collect(Collectors.toList()); + } @Override public List getExternalDataProviders() { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ExternalSourceEntryRestConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ExternalSourceEntryRestConverter.java index 585de2a99a57..35921c16d254 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ExternalSourceEntryRestConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ExternalSourceEntryRestConverter.java @@ -7,9 +7,19 @@ */ package org.dspace.app.rest.converter; +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; + import org.dspace.app.rest.model.ExternalSourceEntryRest; +import org.dspace.app.rest.model.ItemRest; import org.dspace.app.rest.projection.Projection; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; import org.dspace.external.model.ExternalDataObject; +import org.dspace.web.ContextUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -22,6 +32,12 @@ public class ExternalSourceEntryRestConverter implements DSpaceConverter convertToItemRests(List uuids, Projection projection) { + + if (uuids == null) { + return List.of(); + } + + Context context = ContextUtil.obtainCurrentRequestContext(); + return uuids.stream() + .map(uuid -> { + try { + return itemService.find(context, uuid); + } catch (SQLException e) { + throw new RuntimeException(e); + } + }) + .filter(item -> Objects.nonNull(item)) + .map(item -> itemConverter.convert(item, projection)) + .collect(Collectors.toList()); + } + public Class getModelClass() { return ExternalDataObject.class; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ExternalSourceEntryRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ExternalSourceEntryRest.java index 06af7e222713..4e578c313870 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ExternalSourceEntryRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ExternalSourceEntryRest.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest.model; +import java.util.List; + import org.dspace.app.rest.ExternalSourcesRestController; /** @@ -38,6 +40,7 @@ public String getType() { private String value; private String externalSource; private MetadataRest metadata = new MetadataRest(); + private List matchObjects; /** * Generic getter for the id @@ -118,4 +121,12 @@ public MetadataRest getMetadata() { public void setMetadata(MetadataRest metadata) { this.metadata = metadata; } + + public List getMatchObjects() { + return matchObjects; + } + + public void setMatchObjects(List matchObjects) { + this.matchObjects = matchObjects; + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ExternalSourcesRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ExternalSourcesRestControllerIT.java index 565a4d003f78..7c396e803537 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ExternalSourcesRestControllerIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ExternalSourcesRestControllerIT.java @@ -19,15 +19,18 @@ import org.dspace.app.rest.matcher.EntityTypeMatcher; import org.dspace.app.rest.matcher.ExternalSourceEntryMatcher; import org.dspace.app.rest.matcher.ExternalSourceMatcher; +import org.dspace.app.rest.matcher.ItemMatcher; import org.dspace.app.rest.matcher.PageMatcher; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; import org.dspace.builder.WorkflowItemBuilder; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.EntityType; +import org.dspace.content.Item; import org.dspace.core.CrisConstants; import org.dspace.external.provider.AbstractExternalDataProvider; import org.dspace.external.provider.ExternalDataProvider; @@ -485,4 +488,60 @@ public void findSupportedEntityTypesOfAnExternalDataProviderPaginationTest() thr } } + @Test + public void findOneExternalSourceEntriesDuplicationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + // create item withDoiIdentifier equals 10.1016/j.procs.2017.03.031 + Item itemOne = ItemBuilder.createItem(context, col1) + .withFullName("Public item one") + .withIssueDate("2023-10-17") + .withDoiIdentifier("10.1016/j.procs.2017.03.031") + .withEntityType("Publication") + .build(); + + // create another item withDoiIdentifier equals 10.1016/j.procs.2017.03.031 + Item itemTwo = ItemBuilder.createItem(context, col1) + .withFullName("Public item two") + .withIssueDate("2023-10-17") + .withDoiIdentifier("10.1016/j.procs.2017.03.031") + .withEntityType("Publication") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/integration/externalsources/mock/entries") + .param("query", "one").param("size", "1")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.externalSourceEntries", Matchers.hasItem( + ExternalSourceEntryMatcher.matchExternalSourceEntry("onetwo", "onetwo", "onetwo", "mock") + ))) + .andExpect(jsonPath("$._embedded.externalSourceEntries[0].matchObjects", containsInAnyOrder( + ItemMatcher.matchItemProperties(itemOne), + ItemMatcher.matchItemProperties(itemTwo) + ))) + .andExpect(jsonPath("$.page", PageMatcher.pageEntryWithTotalPagesAndElements(0, 1, 2, 2))); + + getClient().perform(get("/api/integration/externalsources/mock/entries") + .param("query", "one").param("size", "1").param("page", "1")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.externalSourceEntries", Matchers.hasItem( + ExternalSourceEntryMatcher.matchExternalSourceEntry("one", "one", "one", "mock") + ))) + .andExpect(jsonPath("$._embedded.externalSourceEntries[0].matchObjects", containsInAnyOrder( + ItemMatcher.matchItemProperties(itemOne), + ItemMatcher.matchItemProperties(itemTwo) + ))) + .andExpect(jsonPath("$.page", PageMatcher.pageEntryWithTotalPagesAndElements(1, 1, 2, 2))); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java 
b/dspace-server-webapp/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java index 894b8e409a4f..0a0b4f062d31 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java +++ b/dspace-server-webapp/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java @@ -85,6 +85,7 @@ public void init() throws IOException { externalDataObject.setDisplayValue(id); List list = new LinkedList<>(); list.add(new MetadataValueDTO("dc", "contributor", "author", null, "Donald, Smith")); + list.add(new MetadataValueDTO("dc", "identifier", "doi", null, "10.1016/j.procs.2017.03.031")); externalDataObject.setMetadata(list); mockLookupMap.put(id, externalDataObject); From 0921934ab4068f92b7bce09f6351f0fac63d4ebb Mon Sep 17 00:00:00 2001 From: "aliaksei.bykau" Date: Wed, 18 Oct 2023 14:49:32 +0200 Subject: [PATCH 430/686] [CST-10149] added logs for UpdateCrisMetricsWithExternalSource --- .../dspace/metrics/MetricsExternalServices.java | 4 ++++ .../UpdateCrisMetricsWithExternalSource.java | 10 ++++++++++ .../metrics/scopus/UpdateScopusMetrics.java | 16 +++++++++++++++- 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/metrics/MetricsExternalServices.java b/dspace-api/src/main/java/org/dspace/metrics/MetricsExternalServices.java index b680e43eeab7..79543f43ce1e 100644 --- a/dspace-api/src/main/java/org/dspace/metrics/MetricsExternalServices.java +++ b/dspace-api/src/main/java/org/dspace/metrics/MetricsExternalServices.java @@ -118,4 +118,8 @@ public int getFetchSize() { public void setFetchSize(int fetchSize) { this.fetchSize = fetchSize; } + + public List getLogs() { + return null; + } } diff --git a/dspace-api/src/main/java/org/dspace/metrics/UpdateCrisMetricsWithExternalSource.java b/dspace-api/src/main/java/org/dspace/metrics/UpdateCrisMetricsWithExternalSource.java index 12877252791b..82c690e62caa 100644 --- a/dspace-api/src/main/java/org/dspace/metrics/UpdateCrisMetricsWithExternalSource.java +++ b/dspace-api/src/main/java/org/dspace/metrics/UpdateCrisMetricsWithExternalSource.java @@ -13,6 +13,7 @@ import java.sql.SQLException; import java.util.HashMap; import java.util.Iterator; +import java.util.List; import java.util.Map; import java.util.UUID; import java.util.function.Function; @@ -108,6 +109,7 @@ public void internalRun() throws Exception { performUpdate(externalService); context.complete(); } catch (Exception e) { + getLogsFromMetricService(externalService); log.error(e.getMessage(), e); handler.handleException(e); context.abort(); @@ -184,6 +186,7 @@ private void performUpdateWithMultiFetch(MetricsExternalServices metricsServices long updatedItems = metricsServices.updateMetric(context, itemIterator, param); + getLogsFromMetricService(metricsServices); handler.logInfo("Updated " + updatedItems + " metrics"); handler.logInfo("Update end"); @@ -216,6 +219,7 @@ private void performUpdateWithSingleFetches(MetricsExternalServices metricsServi } context.commit(); + getLogsFromMetricService(metricsServices); handler.logInfo("Found " + countFoundItems + " items"); handler.logInfo("Updated " + countUpdatedItems + " metrics"); handler.logInfo("Update end"); @@ -240,4 +244,10 @@ private void assignSpecialGroupsInContext() throws SQLException { } } + private void getLogsFromMetricService(MetricsExternalServices metricsServices) { + List metricLogger = metricsServices.getLogs(); + if (metricLogger != null) { + metricLogger.forEach(message -> handler.logInfo(message)); + } + } } diff --git 
a/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java b/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java index bd11feb99d3b..dae0a19ebbc0 100644 --- a/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java +++ b/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java @@ -42,6 +42,8 @@ public class UpdateScopusMetrics extends MetricsExternalServices { public static final String SCOPUS_CITATION = "scopusCitation"; + private List logsCache = new ArrayList<>(); + @Autowired private ScopusProvider scopusProvider; @@ -61,6 +63,10 @@ public List getFilters() { return Arrays.asList("dspace.entity.type:Publication", "dc.identifier.doi:* OR dc.identifier.pmid:*"); } + public List getLogs() { + return logsCache; + } + @Override public boolean updateMetric(Context context, Item item, String param) { String id = buildQuery(item); @@ -82,10 +88,13 @@ public long updateMetric(Context context, Iterator itemIterator, String pa List itemList = new ArrayList<>(); for (int i = 0; i < fetchSize && itemIterator.hasNext(); i++) { Item item = itemIterator.next(); + logAndCache("Adding item with uuid: " + item.getID()); setLastImportMetadataValue(context, item); itemList.add(item); } foundItems += itemList.size(); + String id = this.generateQuery(queryMap, itemList); + logAndCache("Getting scopus metrics for " + id); updatedItems += scopusProvider.getScopusList(this.generateQuery(queryMap, itemList)) .stream() @@ -105,7 +114,7 @@ public long updateMetric(Context context, Iterator itemIterator, String pa log.error("Error while updating scopus' metrics", e); throw new RuntimeException(e.getMessage(), e); } finally { - log.info("Found and fetched {} with {} api calls!", foundItems, apiCalls); + logAndCache("Found and fetched " + foundItems + " with " + apiCalls + " api calls!"); } return updatedItems; } @@ -236,4 +245,9 @@ private Double getDeltaPeriod(CrisMetricDTO currentMetric, Optional } return null; } + + private void logAndCache(String message) { + logsCache.add(message); + log.info(message); + } } From 7221ebc10ca25ce62d634b8d0e10929cb29626ad Mon Sep 17 00:00:00 2001 From: "aliaksei.bykau" Date: Thu, 19 Oct 2023 15:32:33 +0200 Subject: [PATCH 431/686] [CST-10149] added new logs to UpdateCrisMetricsWithExternalSource and also now script will not be stopped in case of an error --- .../dspace/metrics/scopus/ScopusProvider.java | 42 ++++++++++++++----- .../metrics/scopus/UpdateScopusMetrics.java | 13 ++++-- 2 files changed, 42 insertions(+), 13 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/metrics/scopus/ScopusProvider.java b/dspace-api/src/main/java/org/dspace/metrics/scopus/ScopusProvider.java index cf3eb875eb74..9bafca2d5169 100644 --- a/dspace-api/src/main/java/org/dspace/metrics/scopus/ScopusProvider.java +++ b/dspace-api/src/main/java/org/dspace/metrics/scopus/ScopusProvider.java @@ -40,9 +40,15 @@ public class ScopusProvider { private static final Logger log = LogManager.getLogger(ScopusProvider.class); + private List logsCache = new ArrayList<>(); + @Autowired private ScopusRestConnector scopusRestConnector; + public List getLogs() { + return logsCache; + } + /** *

    * This methods fetch a list of metrics using the {@code id} param, @@ -54,6 +60,7 @@ public class ScopusProvider { * @return List of CrisMetrics fetched */ public List getScopusList(String id) { + logsCache = new ArrayList<>(); String scopusResponse = getRecords(id); if (StringUtils.isNotBlank(scopusResponse)) { List crisMetricList = mapToCrisMetricList(scopusResponse); @@ -66,7 +73,7 @@ public List getScopusList(String id) { } return crisMetricList; } - log.error("The query : " + id + " is wrong!"); + logAndCache("The query : " + id + " is wrong!"); return List.of(); } @@ -75,7 +82,7 @@ public CrisMetricDTO getScopusObject(String id) { if (StringUtils.isNotBlank(scopusResponse)) { return mapToCrisMetric(scopusResponse); } - log.error("The query : " + id + " is wrong!"); + logAndCache("The query : " + id + " is wrong!"); return null; } @@ -94,7 +101,7 @@ private CrisMetricDTO mapToCrisMetric(String scopusResponse) { docBuilder = docBuilderFactory.newDocumentBuilder(); parsedResponse = docBuilder.parse(new InputSource(new StringReader(scopusResponse))); } catch (ParserConfigurationException | SAXException | IOException e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return mapToCrisMetric(parsedResponse); } @@ -107,7 +114,7 @@ private List mapToCrisMetricList(String scopusResponse) { docBuilder = docBuilderFactory.newDocumentBuilder(); parsedResponse = docBuilder.parse(new InputSource(new StringReader(scopusResponse))); } catch (ParserConfigurationException | SAXException | IOException e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return mapToCrisMetricList(parsedResponse); } @@ -134,7 +141,7 @@ private String getNext(String scopusResponse) { .map(element -> element.getAttribute("href")) .orElse(null); } catch (ParserConfigurationException | SAXException | IOException e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return nextUrl; } @@ -148,7 +155,7 @@ private List mapToCrisMetricList(Document doc) { .filter(Objects::nonNull) .collect(Collectors.toList()); } catch (Exception e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return scopusCitationList; } @@ -162,7 +169,7 @@ private CrisMetricDTO mapToCrisMetric(Document doc) { .map(this::mapToCrisMetric) .orElse(null); } catch (Exception e) { - log.error(e.getMessage(), e); + logAndCacheError(e); } return scopusCitation; } @@ -170,13 +177,13 @@ private CrisMetricDTO mapToCrisMetric(Document doc) { private CrisMetricDTO mapToCrisMetric(Element dataRoot) { CrisMetricDTO scopusCitation = new CrisMetricDTO(); if (dataRoot == null) { - log.debug("No citation entry found in Scopus"); + logAndCache("No citation entry found in Scopus"); return scopusCitation; } Element errorScopusResp = XMLUtils.getSingleElement(dataRoot, "error"); if (errorScopusResp != null) { - log.debug("Error citation entry found in Scopus: " + errorScopusResp.getTextContent()); + logAndCache("Error citation entry found in Scopus: " + errorScopusResp.getTextContent()); return scopusCitation; } @@ -203,10 +210,25 @@ private CrisMetricDTO mapToCrisMetric(Element dataRoot) { try { scopusCitation.setMetricCount(Double.valueOf(numCitations)); } catch (NullPointerException | NumberFormatException ex) { - log.error("Error while trying to parse numCitations:" + numCitations); + logAndCacheErrorWithMessage("Error while trying to parse numCitations:" + numCitations, ex); } scopusCitation.setRemark(scopusCitation.buildMetricsRemark()); return scopusCitation; } + private void logAndCache(String message) { + logsCache.add("INFO: " + 
message); + log.debug(message); + } + + private void logAndCacheErrorWithMessage(String message, Throwable e) { + logsCache.add("ERROR: " + message + '\n' + e.getMessage()); + log.error(message, e); + } + + private void logAndCacheError(Throwable e) { + logsCache.add("ERROR: " + e.getMessage()); + log.error(e.getMessage(), e); + } + } \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java b/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java index dae0a19ebbc0..782f6f832fb2 100644 --- a/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java +++ b/dspace-api/src/main/java/org/dspace/metrics/scopus/UpdateScopusMetrics.java @@ -82,6 +82,7 @@ public long updateMetric(Context context, Iterator itemIterator, String pa long updatedItems = 0; long foundItems = 0; long apiCalls = 0; + logsCache = new ArrayList<>(); try { while (itemIterator.hasNext()) { Map queryMap = new HashMap<>(); @@ -111,11 +112,11 @@ public long updateMetric(Context context, Iterator itemIterator, String pa context.commit(); } } catch (SQLException e) { - log.error("Error while updating scopus' metrics", e); - throw new RuntimeException(e.getMessage(), e); + logAndCacheError("Error while updating scopus' metrics", e); } finally { logAndCache("Found and fetched " + foundItems + " with " + apiCalls + " api calls!"); } + logsCache.addAll(scopusProvider.getLogs()); return updatedItems; } @@ -222,6 +223,7 @@ private boolean updateScopusMetrics(Context context, Item currentItem, CrisMetri createNewScopusMetrics(context,currentItem, scopusMetric, deltaPeriod1, deltaPeriod2); } catch (SQLException | AuthorizeException e) { + logsCache.add(e.getMessage()); log.error(e.getMessage(), e); } return true; @@ -247,7 +249,12 @@ private Double getDeltaPeriod(CrisMetricDTO currentMetric, Optional } private void logAndCache(String message) { - logsCache.add(message); + logsCache.add("INFO: " + message); log.info(message); } + + private void logAndCacheError(String message, Throwable e) { + logsCache.add("ERROR: " + message + '\n' + e.getMessage()); + log.error(message, e); + } } From 92b31ba2956da97c49904718f13951c0108194fc Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 19 Oct 2023 18:05:03 +0200 Subject: [PATCH 432/686] [DSC-1312] ITs for hidden publications in profile --- .../layout/CrisLayoutTabRestRepositoryIT.java | 205 ++++++++++++++++++ 1 file changed, 205 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java index 80ed84453088..f5057fdaccd4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java @@ -12,8 +12,10 @@ import static org.dspace.app.rest.matcher.CrisLayoutBoxMatcher.matchBox; import static org.dspace.app.rest.matcher.CrisLayoutTabMatcher.matchRest; import static org.dspace.app.rest.matcher.CrisLayoutTabMatcher.matchTab; +import static org.dspace.builder.RelationshipTypeBuilder.createRelationshipTypeBuilder; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ 
-43,6 +45,7 @@ import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.app.util.RelationshipUtils; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; @@ -56,16 +59,22 @@ import org.dspace.builder.EntityTypeBuilder; import org.dspace.builder.GroupBuilder; import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.EntityType; +import org.dspace.content.EntityTypeServiceImpl; import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; +import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataSchemaService; +import org.dspace.content.service.RelationshipService; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.layout.CrisLayoutBox; @@ -104,6 +113,12 @@ public class CrisLayoutTabRestRepositoryIT extends AbstractControllerIntegration @Autowired private CrisLayoutTabService crisLayoutTabService; + @Autowired + protected EntityTypeService entityTypeService; + + @Autowired + protected RelationshipService relationshipService; + private final String METADATASECURITY_URL = "http://localhost:8080/api/core/metadatafield/"; /** @@ -1730,6 +1745,196 @@ public void findByItemTabsWithCustomSecurityLayoutAnonynousTest() throws Excepti .andExpect(jsonPath("$._embedded.tabs[0].rows[1].cells[0].boxes", contains(matchBox(box2)))); } + @Test + public void findByItemTabsWithHiddenRelationshipsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + EntityType eType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + EPerson userA = + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Mecca", "Vincenzo") + .withEmail("vins@4science.com") + .withPassword(password) + .build(); + + Community community = + CommunityBuilder.createCommunity(context) + .withName("Test Community") + .withTitle("Title test community") + .build(); + + Collection col1 = + CollectionBuilder.createCollection(context, community) + .withName("Test Publications") + .build(); + + Collection people = + CollectionBuilder.createCollection(context, community) + .withName("People") + .withEntityType("Person") + .build(); + + Item firstPerson = + ItemBuilder.createItem(context, people) + .withTitle("4Science, Vins") + .build(); + + // RELATION.Person.researchoutputs + CrisLayoutBoxBuilder.createBuilder(context, eType, CrisLayoutBoxTypes.RELATION.name(), true, true) + .withShortname("box-shortname-one") + .build(); + + CrisLayoutBox box1 = + CrisLayoutBoxBuilder.createBuilder(context, eType, CrisLayoutBoxTypes.RELATION.name(), true, true) + .withShortname("researchoutputs") + .withHeader("Publications") + .withSecurity(LayoutSecurity.PUBLIC) + .withType(CrisLayoutBoxTypes.RELATION.name()) + .build(); + + + CrisLayoutBox box2 = + CrisLayoutBoxBuilder.createBuilder(context, eType, true, true) + .withShortname("box-shortname-two") + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + 
CrisLayoutFieldBuilder.createMetadataField(context, "dc.title", 0, 0) + .withLabel("LABEL TITLE") + .withRendering("RENDERIGN TITLE") + .withRowStyle("STYLE") + .withBox(box2) + .build(); + + CrisLayoutTab tab = + CrisLayoutTabBuilder.createTab(context, eType, 0) + .withShortName("details") + .withHeader("Profile") + .addBoxIntoNewRow(box2) + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + CrisLayoutTab tab1 = + CrisLayoutTabBuilder.createTab(context, eType, 0) + .withShortName("publications") + .withHeader("Publications") + .addBoxIntoNewRow(box1) + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs", not(contains(matchTab(tab1))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + String tokenUserA = getAuthToken(userA.getEmail(), password); + getClient(tokenUserA).perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs", not(contains(matchTab(tab1))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect( + jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2))) + ) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + context.turnOffAuthorisationSystem(); + + Item publication1 = + ItemBuilder.createItem(context, col1) + .withTitle("Title Of Item") + .withIssueDate("2015-06-25") + .withAuthor("4Science, Vins", firstPerson.getID().toString()) + .withEntityType("Publication") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(2))) + .andExpect(jsonPath("$._embedded.tabs", containsInAnyOrder(matchTab(tab), matchTab(tab1)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2)))) + .andExpect(jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", contains(matchBox(box1)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()) + .andExpect(jsonPath("$._embedded.tabs[1].rows[1]").doesNotExist()); + + getClient(tokenUserA).perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(2))) + .andExpect(jsonPath("$._embedded.tabs", containsInAnyOrder(matchTab(tab), matchTab(tab1)))) + .andExpect( 
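// Editor's note (annotation, not part of the patch): this IT exercises the hidden-publications feature in
// three phases: (1) with no publications linked to the person, only the "details" tab is returned and the
// "publications" RELATION tab is omitted; (2) once a Publication item authored by the person is created,
// both tabs appear for anonymous and authenticated requests; (3) after an "isResearchoutputsHiddenFor"
// relationship is built between the publication and the person, the publications tab disappears again.
// The relationship is deleted in the finally block so later tests are unaffected.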
+ jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect( + jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2))) + ) + .andExpect(jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", hasSize(1))) + .andExpect( + jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", contains(matchBox(box1))) + ) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()) + .andExpect(jsonPath("$._embedded.tabs[1].rows[1]").doesNotExist()); + + context.turnOffAuthorisationSystem(); + + RelationshipType hiddenResearchOutput = + createRelationshipTypeBuilder( + context, null, entityTypeService.findByEntityType(context, "Person"), "isResearchoutputsHiddenFor", + "notDisplayingResearchoutputs", 0, null, 0, null + ).build(); + + final Relationship publicationOneHiddenByFirstPerson = + RelationshipBuilder.createRelationshipBuilder( + context, publication1, firstPerson, hiddenResearchOutput + ).build(); + + context.restoreAuthSystemState(); + try { + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", not(contains(matchTab(tab1))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + getClient(tokenUserA).perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", firstPerson.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", not(contains(matchTab(tab1))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect( + jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box2)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + } finally { + RelationshipBuilder.deleteRelationship(publicationOneHiddenByFirstPerson.getID()); + } + + } + @Test public void findThumbnailUsingLayoutTabBoxConfiguration() throws Exception { context.turnOffAuthorisationSystem(); From 4a4b886a423431343fa409d8b4bc69e2025b3be7 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 19 Oct 2023 18:06:02 +0200 Subject: [PATCH 433/686] [DSC-1312] Fixes tab with relation box shown only hidden entities --- .../configuration/DiscoveryConfigurationUtilsService.java | 1 + 1 file changed, 1 insertion(+) diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationUtilsService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationUtilsService.java index e2e83920eb70..3db9e04c2694 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationUtilsService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationUtilsService.java @@ -52,6 +52,7 @@ public Iterator findByRelation(Context context, Item item, String relation DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); discoverQuery.setDiscoveryConfigurationName(discoveryConfiguration.getId()); + discoverQuery.setScopeObject(new IndexableItem(item)); List defaultFilterQueries = 
discoveryConfiguration.getDefaultFilterQueries(); for (String defaultFilterQuery : defaultFilterQueries) { discoverQuery.addFilterQueries(MessageFormat.format(defaultFilterQuery, item.getID())); From dd976b693ad2b0b5c0de625871734903ffafaadf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marie-H=C3=A9l=C3=A8ne=20V=C3=A9zina?= Date: Wed, 11 Oct 2023 09:49:35 -0400 Subject: [PATCH 434/686] oai_openaire.xsl : change resourceTypeGeneral for thesis Thesis are "Literature" resource type (resourceTypeGeneral), not "other research product" ref: https://github.com/openaire/guidelines-literature-repositories/issues/43#issuecomment-1318262914 and https://api.openaire.eu/vocabularies/dnet:result_typologies/publication (cherry picked from commit 669ff343503539aa6fc8b23600989ab958a403b9) --- .../crosswalks/oai/metadataFormats/oai_openaire.xsl | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl index 3a1d75eb56c6..16c63c9c1a13 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl @@ -1432,6 +1432,18 @@ literature + + literature + + + literature + + + literature + + + literature + dataset From 3beddec39294cad9f870d11ffd88cb7d41ea1ece Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 19 Oct 2023 21:54:32 +0200 Subject: [PATCH 435/686] [DSC-1312] Removes unused imports --- .../dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java index f5057fdaccd4..c911253a6124 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java @@ -45,7 +45,6 @@ import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; -import org.dspace.app.util.RelationshipUtils; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; @@ -64,7 +63,6 @@ import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.EntityType; -import org.dspace.content.EntityTypeServiceImpl; import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; From 80283045133ed5397b44fee4369e1f9831203b9b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Oct 2023 21:19:14 +0000 Subject: [PATCH 436/686] Bump org.eclipse.jetty:jetty-http Bumps [org.eclipse.jetty:jetty-http](https://github.com/eclipse/jetty.project) from 9.4.52.v20230823 to 9.4.53.v20231009. - [Release notes](https://github.com/eclipse/jetty.project/releases) - [Commits](https://github.com/eclipse/jetty.project/compare/jetty-9.4.52.v20230823...jetty-9.4.53.v20231009) --- updated-dependencies: - dependency-name: org.eclipse.jetty:jetty-http dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 19bf52de855e..4580e1daad0f 100644 --- a/pom.xml +++ b/pom.xml @@ -37,7 +37,7 @@ 2.3.1 1.1.0 - 9.4.52.v20230823 + 9.4.53.v20231009 2.20.0 2.0.28 1.19.0 From 45b22efb3ded7d5e43b8a4e7b700a52919518d0a Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 20 Oct 2023 12:08:19 +0300 Subject: [PATCH 437/686] [CST-11738] fixed broken ITs --- .../org/dspace/app/rest/ItemAuthorityIT.java | 32 +++++++++++++------ .../org/dspace/app/rest/OrcidAuthorityIT.java | 7 ++-- 2 files changed, 28 insertions(+), 11 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemAuthorityIT.java index 9e0ea90afc69..cdfbf2f29b13 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemAuthorityIT.java @@ -127,19 +127,25 @@ public void singleItemAuthorityTest() throws Exception { Map.of("data-oairecerif_author_affiliation", "OrgUnit_1::" + orgUnit_1.getID(), "oairecerif_author_affiliation", "OrgUnit_1::" - + orgUnit_1.getID())), + + orgUnit_1.getID(), + "data-person_identifier_orcid", "", + "person_identifier_orcid", "")), ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_2.getID().toString(), "Author 2", "Author 2", "vocabularyEntry", Map.of("data-oairecerif_author_affiliation", "OrgUnit_1::" + orgUnit_1.getID(), "oairecerif_author_affiliation", "OrgUnit_1::" - + orgUnit_1.getID())), + + orgUnit_1.getID(), + "data-person_identifier_orcid", "", + "person_identifier_orcid", "")), ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_3.getID().toString(), "Author 3", "Author 3", "vocabularyEntry", Map.of("data-oairecerif_author_affiliation", "OrgUnit_2::" + orgUnit_2.getID(), "oairecerif_author_affiliation", "OrgUnit_2::" - + orgUnit_2.getID())) + + orgUnit_2.getID(), + "data-person_identifier_orcid", "", + "person_identifier_orcid", "")) ))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); } @@ -216,13 +222,19 @@ public void multiItemAuthorityTest() throws Exception { "Author 2(OrgUnit_2)", "Author 2", "vocabularyEntry", Map.of("data-oairecerif_author_affiliation", "OrgUnit_2::" + orgUnit_2.getID(), "oairecerif_author_affiliation", "OrgUnit_2::" + orgUnit_2.getID())), + ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_1.getID().toString(), + "Author 1", "Author 1", "vocabularyEntry", + Map.of("data-person_identifier_orcid", "", "person_identifier_orcid", "")), + ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_2.getID().toString(), + "Author 2", "Author 2", "vocabularyEntry", + Map.of("data-person_identifier_orcid", "", "person_identifier_orcid", "")), // filled with EditorAuthority extra metadata generator - ItemAuthorityMatcher.matchItemAuthorityProperties(author_1.getID().toString(), - "Author 1", "Author 1", "vocabularyEntry"), - ItemAuthorityMatcher.matchItemAuthorityProperties(author_2.getID().toString(), - "Author 2", "Author 2", "vocabularyEntry") + ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_1.getID().toString(), + "Author 1", "Author 1", "vocabularyEntry", Map.of()), + ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_2.getID().toString(), + "Author 2", "Author 2", "vocabularyEntry", Map.of()) ))) - .andExpect(jsonPath("$.page.totalElements", Matchers.is(5))); + 
.andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); } @Test @@ -250,7 +262,9 @@ public void singleItemAuthorityWithoutOrgUnitTest() throws Exception { .andExpect(jsonPath("$._embedded.entries", Matchers.contains( ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(author_1.getID().toString(), "Author 1", "Author 1", "vocabularyEntry", - Map.of("data-oairecerif_author_affiliation", "", "oairecerif_author_affiliation", "")) + Map.of("data-oairecerif_author_affiliation", "", "oairecerif_author_affiliation", "", + "data-person_identifier_orcid", "", + "person_identifier_orcid", "")) ))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java index a92253e54437..33bb24f8b029 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java @@ -669,8 +669,11 @@ private Item buildPerson(String title, Item affiliation) { private Matcher affiliationEntry(Item item, String title, String otherInfoValue) { return ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations(id(item), title, - title, "vocabularyEntry", Map.of("data-oairecerif_author_affiliation", otherInfoValue, - "oairecerif_author_affiliation", otherInfoValue)); + title, "vocabularyEntry", Map.of( + "data-oairecerif_author_affiliation", otherInfoValue, + "oairecerif_author_affiliation", otherInfoValue, + "data-" + ORCID_INFO, "", + ORCID_INFO, "")); } private Matcher orcidEntry(String title, String authorityPrefix, String orcid) { From 2b921dda62d4347e671ee6c72704a9412607d9f7 Mon Sep 17 00:00:00 2001 From: "Gantner, Florian Klaus" Date: Fri, 20 Oct 2023 13:44:37 +0200 Subject: [PATCH 438/686] use common config parameters for oai openairecris service description --- dspace/config/crosswalks/oai/service-description.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace/config/crosswalks/oai/service-description.xml b/dspace/config/crosswalks/oai/service-description.xml index a1bde8827083..fdbf62612d40 100644 --- a/dspace/config/crosswalks/oai/service-description.xml +++ b/dspace/config/crosswalks/oai/service-description.xml @@ -2,6 +2,6 @@ https://www.openaire.eu/cerif-profile/vocab/OpenAIRE_Service_Compatibility#1.1 ${dspace.name} An example CRIS that complies with the OpenAIRE Guidelines for CRIS Managers v.1.1. 
- ${dspace.url} - http://${dspace.hostname}/oai/openairecris + ${dspace.ui.url} + ${dspace.server.url}/oai/openairecris From 8b0fd3a63066366e64d8da377f67c0ee7e42ca1d Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 20 Oct 2023 14:50:07 +0300 Subject: [PATCH 439/686] [DSC-1293] add a new metadata dc.title --- dspace/config/spring/api/discovery.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 48b75831f991..ed9e94021895 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -3025,6 +3025,7 @@ organization.legalName + dc.title From b58bb1917d608c9f6cca91ce3bc8fb22ff76ef20 Mon Sep 17 00:00:00 2001 From: "Gantner, Florian Klaus" Date: Fri, 20 Oct 2023 17:38:49 +0200 Subject: [PATCH 440/686] configurable option to wxclude non-public metadata based on their metadata security level/visibility before calculating the metadata signature for orcid profile objects --- .../factory/impl/OrcidAffiliationFactory.java | 24 ++++++++++++++++++- .../impl/OrcidSimpleValueObjectFactory.java | 17 +++++++++++++ dspace/config/spring/api/orcid-services.xml | 8 +++++++ 3 files changed, 48 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java index 89a1ca3d83e4..26623a7ce820 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java @@ -50,6 +50,8 @@ public class OrcidAffiliationFactory extends AbstractOrcidProfileSectionFactory private String endDateField; + private boolean isAllowedMetadataVisibility = false; + public OrcidAffiliationFactory(OrcidProfileSectionType sectionType, OrcidProfileSyncPreference preference) { super(sectionType, preference); } @@ -93,7 +95,13 @@ public List getMetadataSignatures(Context context, Item item) { int groupSize = metadataGroups.getOrDefault(organizationField, Collections.emptyList()).size(); for (int currentGroupIndex = 0; currentGroupIndex < groupSize; currentGroupIndex++) { List metadataValues = getMetadataValueByPlace(metadataGroups, currentGroupIndex); - signatures.add(metadataSignatureGenerator.generate(context, metadataValues)); + //only "visible" metadatavalues within this group + metadataValues = metadataValues.stream() + .filter(metadataValue -> getAllowedMetadataVisibility(metadataValue)) + .collect(Collectors.toList()); + if (!metadataValues.isEmpty()) { + signatures.add(metadataSignatureGenerator.generate(context, metadataValues)); + } } return signatures; @@ -152,6 +160,13 @@ private boolean isUnprocessableValue(MetadataValue value) { return value == null || isBlank(value.getValue()) || value.getValue().equals(PLACEHOLDER_PARENT_METADATA_VALUE); } + private boolean getAllowedMetadataVisibility(MetadataValue metadataValue) { + if (isAllowedMetadataVisibility()) { + return metadataValue.getSecurityLevel() == null || metadataValue.getSecurityLevel() == 0; + } + return true; + } + private Map> getMetadataGroups(Item item) { Map> metadataGroups = new HashMap<>(); metadataGroups.put(organizationField, itemService.getMetadataByMetadataString(item, organizationField)); @@ -204,4 +219,11 @@ public void setEndDateField(String endDateField) { this.endDateField = endDateField; } + public boolean isAllowedMetadataVisibility() { + return 
isAllowedMetadataVisibility; + } + + public void setAllowedMetadataVisibility(boolean allowedMetadataVisibility) { + isAllowedMetadataVisibility = allowedMetadataVisibility; + } } diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java index 4ddfbe47a328..1f55c2d62e99 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java @@ -42,6 +42,8 @@ public class OrcidSimpleValueObjectFactory extends AbstractOrcidProfileSectionFa private List metadataFields = new ArrayList(); + private boolean isAllowedMetadataVisibility = false; + public OrcidSimpleValueObjectFactory(OrcidProfileSectionType sectionType, OrcidProfileSyncPreference preference) { super(sectionType, preference); } @@ -76,6 +78,7 @@ public Object create(Context context, List metadataValues) { public List getMetadataSignatures(Context context, Item item) { return metadataFields.stream() .flatMap(metadataField -> getMetadataValues(item, metadataField).stream()) + .filter(metadataValue -> getAllowedMetadataVisibility(metadataValue)) .map(metadataValue -> metadataSignatureGenerator.generate(context, List.of(metadataValue))) .collect(Collectors.toList()); } @@ -137,6 +140,13 @@ private Address createAddress(Country country) { return address; } + private boolean getAllowedMetadataVisibility(MetadataValue metadataValue) { + if (isAllowedMetadataVisibility()) { + return metadataValue.getSecurityLevel() == null || metadataValue.getSecurityLevel() == 0; + } + return true; + } + public void setMetadataFields(String metadataFields) { this.metadataFields = metadataFields != null ? 
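// Editor's note (not part of the patch): when the new allowedMetadataVisibility flag is enabled on a factory
// bean -- presumably via <property name="allowedMetadataVisibility" value="true"/> in orcid-services.xml,
// matching the setAllowedMetadataVisibility setter -- getAllowedMetadataVisibility() lets only metadata values
// with no security level, or level 0 (presumably the public level), contribute to the ORCID signatures, so
// restricted values are never pushed to the ORCID profile. With the flag off every value is kept, preserving
// the previous behaviour.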
asList(metadataFields.split(",")) : emptyList(); } @@ -146,4 +156,11 @@ public List getMetadataFields() { return metadataFields; } + public boolean isAllowedMetadataVisibility() { + return isAllowedMetadataVisibility; + } + + public void setAllowedMetadataVisibility(boolean allowedMetadataVisibility) { + isAllowedMetadataVisibility = allowedMetadataVisibility; + } } diff --git a/dspace/config/spring/api/orcid-services.xml b/dspace/config/spring/api/orcid-services.xml index eb4e20a459c2..d9c9c6d5f249 100644 --- a/dspace/config/spring/api/orcid-services.xml +++ b/dspace/config/spring/api/orcid-services.xml @@ -112,6 +112,7 @@ + @@ -121,6 +122,7 @@ + @@ -130,36 +132,42 @@ + + + + + + From ffcc9831bc6230214cb2d4c1063b599e6b79735a Mon Sep 17 00:00:00 2001 From: eskander Date: Fri, 20 Oct 2023 18:45:41 +0300 Subject: [PATCH 441/686] [CST-11738] fixed broken ITs --- .../org/dspace/app/rest/OrcidAuthorityIT.java | 39 +++++++++---------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java index 33bb24f8b029..6a8a579dc139 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java @@ -8,7 +8,6 @@ package org.dspace.app.rest; import static org.dspace.authority.service.AuthorityValueService.GENERATE; -import static org.dspace.authority.service.AuthorityValueService.REFERENCE; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.mockito.ArgumentMatchers.anyInt; @@ -188,9 +187,9 @@ public void testWithWillBeReferencedAuthorityPrefix() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -237,9 +236,9 @@ public void testWithPagination() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(10))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -285,7 +284,7 @@ public void testWithPagination() throws Exception { affiliationEntry(author_2, 
"Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333")))) + orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333")))) .andExpect(jsonPath("$.page.size", Matchers.is(5))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -298,8 +297,8 @@ public void testWithPagination() throws Exception { .param("size", "5")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(5))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -312,7 +311,7 @@ public void testWithPagination() throws Exception { .param("size", "6")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(6))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -533,9 +532,9 @@ public void testWithComposedName() throws Exception { .param("filter", "John Bruce Wayne")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); @@ -568,9 +567,9 @@ public void testWithLastNameAndFirstNameSeparatedByComma() throws Exception { .param("filter", "Wayne, Bruce")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); @@ -599,9 +598,9 @@ public void testWithAffiliationExtra() throws Exception { .param("filter", "author")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntryWithAffiliation("Author From 
Orcid 2", REFERENCE, "0000-2222-3333-4444", "Org1, Org2"), - orcidEntryWithAffiliation("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777", "Organization")))) + orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), + orcidEntryWithAffiliation("Author From Orcid 2", GENERATE, "0000-2222-3333-4444", "Org1, Org2"), + orcidEntryWithAffiliation("Author From Orcid 3", GENERATE, "0000-5555-6666-7777", "Organization")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); From ad7f506b2b72610eb593c21d7726833cd11d8058 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Oct 2023 21:57:32 +0000 Subject: [PATCH 442/686] Bump org.json:json from 20230227 to 20231013 in /dspace-api Bumps [org.json:json](https://github.com/douglascrockford/JSON-java) from 20230227 to 20231013. - [Release notes](https://github.com/douglascrockford/JSON-java/releases) - [Changelog](https://github.com/stleary/JSON-java/blob/master/docs/RELEASES.md) - [Commits](https://github.com/douglascrockford/JSON-java/commits) --- updated-dependencies: - dependency-name: org.json:json dependency-type: direct:production ... Signed-off-by: dependabot[bot] (cherry picked from commit 11a08f1ac0a9b75bf3f2869d3760b2f0e229aefe) --- dspace-api/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index ec9c423a9ea7..94b53e567cd9 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -779,7 +779,7 @@ org.json json - 20230227 + 20231013 From 093785e5e222f73b847477e633a41ea577bafc11 Mon Sep 17 00:00:00 2001 From: damian Date: Fri, 21 Jul 2023 12:02:12 +0200 Subject: [PATCH 443/686] Reading localized license file. --- .../org/dspace/core/LicenseServiceImpl.java | 25 ++++++++++++++----- 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java index 8324105a3085..f99b3c31e51c 100644 --- a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java @@ -17,9 +17,12 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; +import javax.servlet.http.HttpServletRequest; import org.dspace.core.service.LicenseService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.services.model.Request; +import org.dspace.web.ContextUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -101,13 +104,14 @@ public String getLicenseText(String licenseFile) { /** * Get the site-wide default license that submitters need to grant * + * Localized license requires: default_{{locale}}.license file. + * Locale also must be listed in webui.supported.locales setting. + * * @return the default license */ @Override public String getDefaultSubmissionLicense() { - if (null == license) { - init(); - } + init(); return license; } @@ -115,9 +119,8 @@ public String getDefaultSubmissionLicense() { * Load in the default license. 
*/ protected void init() { - File licenseFile = new File( - DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("dspace.dir") - + File.separator + "config" + File.separator + "default.license"); + Context context = obtainContext(); + File licenseFile = new File(I18nUtil.getDefaultLicense(context)); FileInputStream fir = null; InputStreamReader ir = null; @@ -169,4 +172,14 @@ protected void init() { } } } + + private Context obtainContext() { + Request currentRequest = DSpaceServicesFactory.getInstance().getRequestService().getCurrentRequest(); + if (currentRequest != null) { + HttpServletRequest request = currentRequest.getHttpServletRequest(); + return ContextUtil.obtainContext(request); + } else { + return new Context(); + } + } } From b21b5e009647c77730d99b1afad1bfb4258221a7 Mon Sep 17 00:00:00 2001 From: damian Date: Fri, 21 Jul 2023 12:02:12 +0200 Subject: [PATCH 444/686] Reading localized license file. --- .../src/main/java/org/dspace/core/LicenseServiceImpl.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java index f99b3c31e51c..5e42b04e7170 100644 --- a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java @@ -173,6 +173,9 @@ protected void init() { } } + /** + * Obtaining current request context + */ private Context obtainContext() { Request currentRequest = DSpaceServicesFactory.getInstance().getRequestService().getCurrentRequest(); if (currentRequest != null) { From fa01fde364c12f82105981ffcd1a1204ceccffc4 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 20 Oct 2023 16:35:14 -0500 Subject: [PATCH 445/686] Applying commit 6108c98d82f3cbf00e248f83535b06da4040ece4 --- .../org/dspace/core/LicenseServiceImpl.java | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java index 5e42b04e7170..d895f9a76481 100644 --- a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java @@ -174,15 +174,22 @@ protected void init() { } /** - * Obtaining current request context + * Obtaining current request context. + * Return new context if getting one from current request failed. 
+ * + * @return DSpace context object */ private Context obtainContext() { - Request currentRequest = DSpaceServicesFactory.getInstance().getRequestService().getCurrentRequest(); - if (currentRequest != null) { - HttpServletRequest request = currentRequest.getHttpServletRequest(); - return ContextUtil.obtainContext(request); - } else { - return new Context(); + try { + Request currentRequest = DSpaceServicesFactory.getInstance().getRequestService().getCurrentRequest(); + if (currentRequest != null) { + HttpServletRequest request = currentRequest.getHttpServletRequest(); + return ContextUtil.obtainContext(request); + } + } catch (Exception e) { + log.error("Can't load current request context."); } + + return new Context(); } } From b249f48c6acc22b2901c460fa3e82cb21001a528 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 23 Oct 2023 17:03:20 +0200 Subject: [PATCH 446/686] [DSC-950] Improved CrisLayoutTabRestRepositoryITs --- .../layout/CrisLayoutTabRestRepositoryIT.java | 193 ++++++++++++++++++ 1 file changed, 193 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java index 80ed84453088..7c3dff3d9a21 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java @@ -16,6 +16,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.hasValue; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -63,6 +64,7 @@ import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; +import org.dspace.content.service.BitstreamService; import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataSchemaService; @@ -104,6 +106,9 @@ public class CrisLayoutTabRestRepositoryIT extends AbstractControllerIntegration @Autowired private CrisLayoutTabService crisLayoutTabService; + @Autowired + private BitstreamService bitstreamService; + private final String METADATASECURITY_URL = "http://localhost:8080/api/core/metadatafield/"; /** @@ -1786,6 +1791,194 @@ public void findThumbnailUsingLayoutTabBoxConfiguration() throws Exception { } + @Test + public void excludeThumbnailNegativeMetadataValueMatcherTabBoxConfiguration() throws Exception { + context.turnOffAuthorisationSystem(); + EntityType eType = + EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + // Setting up configuration for dc.type = logo with rendering thumbnail + MetadataField metadataField = + mfss.findByElement(context, "dc", "type", null); + + CrisLayoutBox box = + CrisLayoutBoxBuilder.createBuilder(context, eType, true, false) + .withShortname("researcherprofile") + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + CrisLayoutField field = + CrisLayoutFieldBuilder.createBistreamField(context, metadataField, "ORIGINAL", 0, 0, 0) + .withRendering("thumbnail") + .withBox(box) + .build(); + + // filter out bitstreams with "personal picture" as dc.type + ((CrisLayoutFieldBitstream)field).setMetadataValue("!personal picture"); + + CrisLayoutTab tab = + 
CrisLayoutTabBuilder.createTab(context, eType, 0) + .withShortName("otherinfo") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("Other") + .addBoxIntoNewRow(box) + .build(); + + Community community = CommunityBuilder.createCommunity(context).build(); + Collection personCollection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, personCollection).withEntityType("Person").build(); + + Bundle original = BundleBuilder.createBundle(context, item).withName("ORIGINAL").build(); + + org.dspace.content.Bitstream bitstream0 = + BitstreamBuilder.createBitstream(context, original, InputStream.nullInputStream()) + .withType("logo") + .build(); + + original.setPrimaryBitstreamID(bitstream0); + + context.commit(); + context.restoreAuthSystemState(); + + item = context.reloadEntity(item); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + context.turnOffAuthorisationSystem(); + + original = context.reloadEntity(original); + org.dspace.content.Bitstream bitstream1 = + BitstreamBuilder.createBitstream(context, original, InputStream.nullInputStream()) + .withType("personal picture") + .build(); + original.setPrimaryBitstreamID(bitstream1); + + context.commit(); + context.restoreAuthSystemState(); + + item = context.reloadEntity(item); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(box)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + + context.turnOffAuthorisationSystem(); + + bitstream0 = context.reloadEntity(bitstream0); + + bitstreamService.delete(context, bitstream0); + + context.commit(); + context.restoreAuthSystemState(); + + context.reloadEntity(item); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(0))) + .andExpect(jsonPath("$._embedded.tabs").doesNotExist()); + + } + + @Test + public void excludeThumbnailNegativeMetadataValueMatcherTabMultiBoxConfiguration() throws Exception { + context.turnOffAuthorisationSystem(); + EntityType eType = + EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + // Setting up configuration for dc.type = logo with rendering thumbnail + MetadataField dcType = + mfss.findByElement(context, "dc", "type", null); + MetadataField dcTitle = + mfss.findByElement(context, "dc", "title", null); + + CrisLayoutBox thumbnailBox = + CrisLayoutBoxBuilder.createBuilder(context, eType, true, false) + .withShortname("researcherprofile") + 
.withSecurity(LayoutSecurity.PUBLIC) + .build(); + CrisLayoutBox titleBox = + CrisLayoutBoxBuilder.createBuilder(context, eType, true, false) + .withShortname("title") + .withSecurity(LayoutSecurity.PUBLIC) + .build(); + + CrisLayoutField thumbnailField = + CrisLayoutFieldBuilder.createBistreamField(context, dcType, "ORIGINAL", 0, 0, 0) + .withRendering("thumbnail") + .withBox(thumbnailBox) + .build(); + + // filter out bitstreams with "personal picture" as dc.type + ((CrisLayoutFieldBitstream)thumbnailField).setMetadataValue("!personal picture"); + + CrisLayoutField titleField = + CrisLayoutFieldBuilder.createMetadataField(context, dcTitle, 0, 0) + .withRendering("heading") + .withBox(titleBox) + .build(); + + CrisLayoutTab tab = + CrisLayoutTabBuilder.createTab(context, eType, 0) + .withShortName("otherinfo") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("Other") + .addBoxIntoNewRow(thumbnailBox) + .addBoxIntoNewRow(titleBox) + .build(); + + Community community = CommunityBuilder.createCommunity(context).build(); + Collection personCollection = CollectionBuilder.createCollection(context, community).build(); + Item item = + ItemBuilder.createItem(context, personCollection) + .withEntityType("Person") + .withTitle("Custom Person") + .build(); + + Bundle original = + BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + org.dspace.content.Bitstream bitstream0 = + BitstreamBuilder.createBitstream(context, original, InputStream.nullInputStream()) + .withType("personal picture") + .build(); + + original.setPrimaryBitstreamID(bitstream0); + + context.commit(); + context.restoreAuthSystemState(); + + item = context.reloadEntity(item); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tab)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", + not(contains(matchBox(thumbnailBox), matchBox(titleBox))))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(titleBox)))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); + } + private CrisLayoutTabRest parseJson(String name) throws Exception { return new ObjectMapper().readValue(getFileInputStream(name), CrisLayoutTabRest.class); } From 5b4703a1d68d1407628f7b9832f9f30bb9f1c38b Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Wed, 25 Oct 2023 13:22:13 +0200 Subject: [PATCH 447/686] [CST-11738] setting default behavior for orcid to 'will be referenced' --- .../org/dspace/app/rest/OrcidAuthorityIT.java | 41 ++++++++++--------- dspace/config/modules/orcid.cfg | 2 +- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java index 6a8a579dc139..69458b6f9a6d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.dspace.authority.service.AuthorityValueService.GENERATE; +import static org.dspace.authority.service.AuthorityValueService.REFERENCE; import static 
org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.mockito.ArgumentMatchers.anyInt; @@ -140,7 +141,7 @@ public void testWithWillBeGeneratedAuthorityPrefix() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) @@ -187,9 +188,9 @@ public void testWithWillBeReferencedAuthorityPrefix() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -236,9 +237,9 @@ public void testWithPagination() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(10))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -284,7 +285,7 @@ public void testWithPagination() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333")))) + orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333")))) .andExpect(jsonPath("$.page.size", Matchers.is(5))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -297,8 +298,8 @@ public void testWithPagination() throws Exception { .param("size", "5")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) 
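// Editor's note (not part of the patch): REFERENCE and GENERATE are the AuthorityValueService prefixes
// "will be referenced::" and "will be generated::". With the new default
// orcid.authority.prefix = will be referenced::ORCID::, an unmatched ORCID author is presumably stored with
// an authority value such as "will be referenced::ORCID::0000-1111-2222-3333" instead of
// "will be generated::ORCID::0000-1111-2222-3333", which is why these expectations switch from GENERATE
// to REFERENCE.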
.andExpect(jsonPath("$.page.size", Matchers.is(5))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -311,7 +312,7 @@ public void testWithPagination() throws Exception { .param("size", "6")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(6))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -532,9 +533,9 @@ public void testWithComposedName() throws Exception { .param("filter", "John Bruce Wayne")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); @@ -567,9 +568,9 @@ public void testWithLastNameAndFirstNameSeparatedByComma() throws Exception { .param("filter", "Wayne, Bruce")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) + orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); @@ -598,9 +599,9 @@ public void testWithAffiliationExtra() throws Exception { .param("filter", "author")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntryWithAffiliation("Author From Orcid 2", GENERATE, "0000-2222-3333-4444", "Org1, Org2"), - orcidEntryWithAffiliation("Author From Orcid 3", GENERATE, "0000-5555-6666-7777", "Organization")))) + orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), + orcidEntryWithAffiliation("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444", "Org1, Org2"), + orcidEntryWithAffiliation("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777", "Organization")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); diff --git a/dspace/config/modules/orcid.cfg b/dspace/config/modules/orcid.cfg index a702eec3acf2..bc4d8cfdd90e 100644 --- a/dspace/config/modules/orcid.cfg +++ b/dspace/config/modules/orcid.cfg @@ -16,7 +16,7 @@ orcid.webhook.registration-mode = only_linked # your webhooks orcid.webhook.registration-token = 
01dfd257-c13f-43df-a0e2-9bb6c3cc7069 -orcid.authority.prefix = will be generated::ORCID:: +orcid.authority.prefix = will be referenced::ORCID:: orcid.linkable-metadata-fields.ignore = From eeadc4697bbdba66c054970812408ccbc07240a3 Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Wed, 25 Oct 2023 13:25:09 +0200 Subject: [PATCH 448/686] [CST-11738] fix IT --- .../src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java index 69458b6f9a6d..33bb24f8b029 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java @@ -141,7 +141,7 @@ public void testWithWillBeGeneratedAuthorityPrefix() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) From 1722d98b43c8d803efe125de4685cbb9eefc2669 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 26 Oct 2023 14:57:19 +0200 Subject: [PATCH 449/686] [DSC-1324][CST-12375] Fixes ResearcherProfile creation for ORCID auth --- .../java/org/dspace/content/ItemServiceImpl.java | 10 ++++++++-- .../org/dspace/content/service/ItemService.java | 13 +++++++++++++ .../profile/ResearcherProfileServiceImpl.java | 7 +++++-- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 3ad03377cb27..cfad7f87dba5 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -298,9 +298,7 @@ private List getThumbnailFields(List crisLayoutT * @param context * @param item * @param bundle - * @param metadata * @param value - * @param requireOriginal * @throws SQLException * @return Bitstream */ @@ -2138,4 +2136,12 @@ public boolean isLatestVersion(Context context, Item item) throws SQLException { } + @Override + public void addResourcePolicy(Context context, Item item, int actionID, EPerson eperson) + throws SQLException, AuthorizeException { + ResourcePolicy resourcePolicy = + this.authorizeService.createResourcePolicy(context, item, null, eperson, actionID, null); + item.getResourcePolicies().add(resourcePolicy); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index e6823690743d..44ba4a6bcd99 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -922,4 +922,17 @@ public Iterator findRelatedItemsByAuthorityControlledFields(Context contex */ public boolean isLatestVersion(Context context, Item item) throws SQLException; + /** + * Adds a resource policy to the specified item for the given action and EPerson. 
+ * + * @param context the DSpace context + * @param item the item to add the policy to + * @param actionID the ID of the action to add the policy for + * @param eperson the EPerson to add the policy for + * @throws SQLException if a database error occurs + * @throws AuthorizeException if the current user is not authorized to perform this action + */ + void addResourcePolicy(Context context, Item item, int actionID, EPerson eperson) + throws SQLException, AuthorizeException; + } diff --git a/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java index cec440df6d45..32915d74c0cf 100644 --- a/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java @@ -314,17 +314,20 @@ private Item createProfileItem(Context context, EPerson ePerson, Collection coll item = installItemService.installItem(context, workspaceItem); + context.uncacheEntity(workspaceItem); + if (isNewProfileNotVisibleByDefault()) { Group anonymous = groupService.findByName(context, ANONYMOUS); authorizeService.removeGroupPolicies(context, item, anonymous); } - authorizeService.addPolicy(context, item, READ, ePerson); + itemService.addResourcePolicy(context, item, READ, ePerson); if (isAdditionOfWritePolicyOnProfileEnabled()) { - authorizeService.addPolicy(context, item, WRITE, ePerson); + itemService.addResourcePolicy(context, item, WRITE, ePerson); } + return reloadItem(context, item); } From 1c7e9ea1f10df92631cbd48f424d5608fab02219 Mon Sep 17 00:00:00 2001 From: "Gantner, Florian Klaus" Date: Tue, 10 Oct 2023 16:19:11 +0200 Subject: [PATCH 450/686] quote Pattern for thumbnail resolution constructed from bitstream filename (cherry picked from commit a1248074681a7bc4603176fb3e7d989b91edcbcd) --- .../dspace/content/BitstreamServiceImpl.java | 2 +- .../app/rest/BitstreamRestRepositoryIT.java | 47 +++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index cc89cea33a25..7433338ad94c 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -403,7 +403,7 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); + Pattern pattern = Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$"); for (Bundle bundle : bitstream.getBundles()) { for (Item item : bundle.getItems()) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index 8b34edb938a6..b4c0a5c11e53 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -1694,6 +1694,53 @@ public void thumbnailEndpointTest() throws Exception { .andExpect(jsonPath("$.type", is("bitstream"))); } + @Test + public void thumbnailEndpointTestWithSpecialCharactersInFileName() throws Exception { + // Given an Item + 
context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1").build(); + + Item item = ItemBuilder.createItem(context, col1) + .withTitle("Test item -- thumbnail") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .build(); + + Bundle originalBundle = BundleBuilder.createBundle(context, item) + .withName(Constants.DEFAULT_BUNDLE_NAME) + .build(); + Bundle thumbnailBundle = BundleBuilder.createBundle(context, item) + .withName("THUMBNAIL") + .build(); + + InputStream is = IOUtils.toInputStream("dummy", "utf-8"); + + // With an ORIGINAL Bitstream & matching THUMBNAIL Bitstream containing special characters in filenames + Bitstream bitstream = BitstreamBuilder.createBitstream(context, originalBundle, is) + .withName("test (2023) file.pdf") + .withMimeType("application/pdf") + .build(); + Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, is) + .withName("test (2023) file.pdf.jpg") + .withMimeType("image/jpeg") + .build(); + + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/thumbnail")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.uuid", Matchers.is(thumbnail.getID().toString()))) + .andExpect(jsonPath("$.type", is("bitstream"))); + } + @Test public void thumbnailEndpointMultipleThumbnailsWithPrimaryBitstreamTest() throws Exception { // Given an Item From af5632c2f249f95f86c0f009194646d8c966a68d Mon Sep 17 00:00:00 2001 From: "Gantner, Florian Klaus" Date: Thu, 12 Oct 2023 17:58:13 +0200 Subject: [PATCH 451/686] check null value of bitstream name before quoting name for regex (cherry picked from commit a9bcc0c223d0219f464d986d7b7c66b3c4cbc39c) --- .../main/java/org/dspace/content/BitstreamServiceImpl.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 7433338ad94c..92acce676584 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -403,7 +403,9 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$"); + Pattern pattern = Pattern.compile("^" + + (bitstream.getName() != null ? 
Pattern.quote(bitstream.getName()) : bitstream.getName()) + + ".([^.]+)$"); for (Bundle bundle : bitstream.getBundles()) { for (Item item : bundle.getItems()) { From 9d099dcf3fc17b7b25b6faa7b3bb17d5e3eefecc Mon Sep 17 00:00:00 2001 From: "Gantner, Florian Klaus" Date: Thu, 19 Oct 2023 15:44:03 +0200 Subject: [PATCH 452/686] extract bitstream thumbnail name pattern into own function (cherry picked from commit c5466c2249c092f6638a7072b57c934d1d3581b5) --- .../java/org/dspace/content/BitstreamServiceImpl.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 92acce676584..16532660561d 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -403,9 +403,7 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + - (bitstream.getName() != null ? Pattern.quote(bitstream.getName()) : bitstream.getName()) - + ".([^.]+)$"); + Pattern pattern = getBitstreamNamePattern(bitstream); for (Bundle bundle : bitstream.getBundles()) { for (Item item : bundle.getItems()) { @@ -422,6 +420,13 @@ public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLEx return null; } + protected Pattern getBitstreamNamePattern(Bitstream bitstream) { + if (bitstream.getName() != null) { + return Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$"); + } + return Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); + } + @Override public BitstreamFormat getFormat(Context context, Bitstream bitstream) throws SQLException { if (bitstream.getBitstreamFormat() == null) { From 7cdc7108952556e166cd9cf4dcc104f8b3674328 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Tue, 1 Aug 2023 17:13:07 -0400 Subject: [PATCH 453/686] Make workflow curation tasks actually work. When curation runs, there was no "current user" and no claimed task, so the code broke when trying to find people to notify about curation failures. (cherry picked from commit a76af35a0cd4f0c0e8737c736578b17bcc349691) --- .../curate/XmlWorkflowCuratorServiceImpl.java | 29 ++++++++++++++----- .../dspace/eperson/EPersonServiceImpl.java | 18 ++++++++++++ .../eperson/service/EPersonService.java | 12 ++++++++ 3 files changed, 52 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java index 05c7a8d99930..dd6c8d5e154a 100644 --- a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import java.util.List; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; @@ -47,14 +48,17 @@ * Manage interactions between curation and workflow. A curation task can be * attached to a workflow step, to be executed during the step. * + *

    + * NOTE: when run in workflow, curation tasks run with + * authorization disabled. + * * @see CurationTaskConfig * @author mwood */ @Service public class XmlWorkflowCuratorServiceImpl implements XmlWorkflowCuratorService { - private static final Logger LOG - = org.apache.logging.log4j.LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); @Autowired(required = true) protected XmlWorkflowFactory workflowFactory; @@ -97,7 +101,13 @@ public boolean doCuration(Context c, XmlWorkflowItem wfi) throws AuthorizeException, IOException, SQLException { Curator curator = new Curator(); curator.setReporter(reporter); - return curate(curator, c, wfi); + c.turnOffAuthorisationSystem(); + if (null == c.getCurrentUser()) { // We need someone to email + c.setCurrentUser(ePersonService.findAnAdministrator(c)); + } + boolean failedP = curate(curator, c, wfi); + c.restoreAuthSystemState(); + return failedP; } @Override @@ -123,7 +133,7 @@ public boolean curate(Curator curator, Context c, XmlWorkflowItem wfi) item.setOwningCollection(wfi.getCollection()); for (Task task : step.tasks) { curator.addTask(task.name); - curator.curate(item); + curator.curate(c, item); int status = curator.getStatus(task.name); String result = curator.getResult(task.name); String action = "none"; @@ -223,8 +233,12 @@ protected void notifyContacts(Context c, XmlWorkflowItem wfi, String status, String action, String message) throws AuthorizeException, IOException, SQLException { List epa = resolveContacts(c, task.getContacts(status), wfi); - if (epa.size() > 0) { + if (!epa.isEmpty()) { workflowService.notifyOfCuration(c, wfi, epa, task.name, action, message); + } else { + LOG.warn("No contacts were found for workflow item {}: " + + "task {} returned action {} with message {}", + wfi.getID(), task.name, action, message); } } @@ -247,8 +261,7 @@ protected List resolveContacts(Context c, List contacts, // decode contacts if ("$flowgroup".equals(contact)) { // special literal for current flowgoup - ClaimedTask claimedTask = claimedTaskService.findByWorkflowIdAndEPerson(c, wfi, c.getCurrentUser()); - String stepID = claimedTask.getStepID(); + String stepID = getFlowStep(c, wfi).step; Step step; try { Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection()); @@ -266,11 +279,13 @@ protected List resolveContacts(Context c, List contacts, epList.addAll(group.getMembers()); } } else if ("$colladmin".equals(contact)) { + // special literal for collection administrators Group adGroup = wfi.getCollection().getAdministrators(); if (adGroup != null) { epList.addAll(groupService.allMembers(c, adGroup)); } } else if ("$siteadmin".equals(contact)) { + // special literal for site administrator EPerson siteEp = ePersonService.findByEmail(c, configurationService.getProperty("mail.admin")); if (siteEp != null) { diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index 61477995c7ed..e3b743c4a711 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -47,6 +47,7 @@ import org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.services.ConfigurationService; import org.dspace.util.UUIDUtils; import org.dspace.versioning.Version; import org.dspace.versioning.VersionHistory; @@ -101,6 +102,8 @@ public class EPersonServiceImpl 
extends DSpaceObjectServiceImpl impleme protected VersionDAO versionDAO; @Autowired(required = true) protected ClaimedTaskService claimedTaskService; + @Autowired(required = true) + protected ConfigurationService configurationService; @Autowired protected OrcidTokenService orcidTokenService; @@ -113,6 +116,21 @@ public EPerson find(Context context, UUID id) throws SQLException { return ePersonDAO.findByID(context, EPerson.class, id); } + @Override + public EPerson findAnAdministrator(Context c) + throws SQLException { + List contacts = groupService.findByName(c, Group.ADMIN).getMembers(); + EPerson currentUser; + if (contacts.isEmpty()) { + log.warn("Administrators group is empty"); + currentUser = findByEmail(c, configurationService.getProperty("mail.admin")); + // Null if no such EPerson + } else { + currentUser = contacts.get(0); + } + return currentUser; + } + @Override public EPerson findByIdOrLegacyId(Context context, String id) throws SQLException { if (StringUtils.isNumeric(id)) { diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java index c5c9801c16dd..c3def01a829d 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java @@ -157,6 +157,18 @@ public List findAll(Context context, int sortField) public List findAll(Context context, int sortField, int pageSize, int offset) throws SQLException; + /** + * Try very hard to find an administrator's account. Might return a member + * of the Administrators group, or an account with a configured email + * address. + * + * @param context current DSpace session. + * @return a presumed administrator account, or null if none could be found. + * @throws SQLException + */ + public EPerson findAnAdministrator(Context context) + throws SQLException; + /** * Create a new eperson * From 4b62c9b605eeca4d246bf1fe6396d51e385b94b0 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Wed, 2 Aug 2023 16:25:46 -0400 Subject: [PATCH 454/686] Community request: fake EPerson from configuration. 
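Rather than borrowing a real member of the Administrators group, curation notifications now fall back to a throwaway "system" EPerson whose address comes from the mail.admin property (or "postmaster" as a last resort). A minimal sketch of the intended behaviour, condensed from the EPersonServiceImpl change further down in this patch (method and field names as they appear there; illustrative only, not the literal hunk):

    // Build a fake, never-persisted EPerson that can at least receive email.
    EPerson getSystemEPerson(Context c) throws SQLException {
        String adminEmail = configurationService.getProperty("mail.admin");
        if (adminEmail == null) {
            adminEmail = "postmaster"; // last-ditch attempt to send somewhere
        }
        EPerson systemEPerson = findByEmail(c, adminEmail);
        if (systemEPerson == null) {
            systemEPerson = new EPerson(); // transient, only used for its address
            systemEPerson.setEmail(adminEmail);
        }
        return systemEPerson;
    }

XmlWorkflowCuratorServiceImpl sets this fake account as the current user only while curation runs and restores the anonymous context afterwards.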
(cherry picked from commit bb9e88d1bb452d0865f4827134baf907e6d34044) --- .../curate/XmlWorkflowCuratorServiceImpl.java | 7 ++++- .../dspace/eperson/EPersonServiceImpl.java | 29 ++++++++++++------- .../eperson/service/EPersonService.java | 12 ++++---- 3 files changed, 32 insertions(+), 16 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java index dd6c8d5e154a..97537befd2f4 100644 --- a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java @@ -102,10 +102,15 @@ public boolean doCuration(Context c, XmlWorkflowItem wfi) Curator curator = new Curator(); curator.setReporter(reporter); c.turnOffAuthorisationSystem(); + boolean wasAnonymous = false; if (null == c.getCurrentUser()) { // We need someone to email - c.setCurrentUser(ePersonService.findAnAdministrator(c)); + wasAnonymous = true; + c.setCurrentUser(ePersonService.getSystemEPerson(c)); } boolean failedP = curate(curator, c, wfi); + if (wasAnonymous) { + c.setCurrentUser(null); + } c.restoreAuthSystemState(); return failedP; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index e3b743c4a711..2d0574a6301d 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -116,19 +116,28 @@ public EPerson find(Context context, UUID id) throws SQLException { return ePersonDAO.findByID(context, EPerson.class, id); } + /** + * Create a fake EPerson which can receive email. Its address will be the + * value of "mail.admin", or "postmaster" if all else fails. + * @param c + * @return + * @throws SQLException + */ @Override - public EPerson findAnAdministrator(Context c) + public EPerson getSystemEPerson(Context c) throws SQLException { - List contacts = groupService.findByName(c, Group.ADMIN).getMembers(); - EPerson currentUser; - if (contacts.isEmpty()) { - log.warn("Administrators group is empty"); - currentUser = findByEmail(c, configurationService.getProperty("mail.admin")); - // Null if no such EPerson - } else { - currentUser = contacts.get(0); + String adminEmail = configurationService.getProperty("mail.admin"); + if (null == adminEmail) { + adminEmail = "postmaster"; // Last-ditch attempt to send *somewhere* } - return currentUser; + EPerson systemEPerson = findByEmail(c, adminEmail); + + if (null == systemEPerson) { + systemEPerson = new EPerson(); + systemEPerson.setEmail(adminEmail); + } + + return systemEPerson; } @Override diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java index c3def01a829d..47be942e97e9 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java @@ -13,6 +13,7 @@ import java.util.Date; import java.util.List; import java.util.Set; +import javax.validation.constraints.NotNull; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; @@ -158,15 +159,16 @@ public List findAll(Context context, int sortField, int pageSize, int o throws SQLException; /** - * Try very hard to find an administrator's account. 
Might return a member - * of the Administrators group, or an account with a configured email - * address. + * The "System EPerson" is a fake account that exists only to receive email. + * It has an email address that should be presumed usable. It does not + * exist in the database and is not complete. * * @param context current DSpace session. - * @return a presumed administrator account, or null if none could be found. + * @return an EPerson that can presumably receive email. * @throws SQLException */ - public EPerson findAnAdministrator(Context context) + @NotNull + public EPerson getSystemEPerson(Context context) throws SQLException; /** From 7a2b0188432ce8c81e7f7cbac9233efca127b067 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Wed, 2 Aug 2023 17:23:36 -0400 Subject: [PATCH 455/686] Correct some documentation. (cherry picked from commit be22790aad7f627e2ac027773e272b703986f589) --- .../curate/service/XmlWorkflowCuratorService.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java b/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java index 2ad1eac12904..778b779cfe03 100644 --- a/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java +++ b/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java @@ -42,9 +42,9 @@ public boolean needsCuration(Context c, XmlWorkflowItem wfi) * * @param c the context * @param wfi the workflow item - * @return true if curation was completed or not required, + * @return true if curation was completed or not required; * false if tasks were queued for later completion, - * or item was rejected + * or item was rejected. * @throws AuthorizeException if authorization error * @throws IOException if IO error * @throws SQLException if database error @@ -58,7 +58,9 @@ public boolean doCuration(Context c, XmlWorkflowItem wfi) * @param curator the curation context * @param c the user context * @param wfId the workflow item's ID - * @return true if curation failed. + * @return true if curation curation was completed or not required; + * false if tasks were queued for later completion, + * or item was rejected. * @throws AuthorizeException if authorization error * @throws IOException if IO error * @throws SQLException if database error @@ -72,7 +74,9 @@ public boolean curate(Curator curator, Context c, String wfId) * @param curator the curation context * @param c the user context * @param wfi the workflow item - * @return true if curation failed. + * @return true if workflow curation was completed or not required; + * false if tasks were queued for later completion, + * or item was rejected. * @throws AuthorizeException if authorization error * @throws IOException if IO error * @throws SQLException if database error From 06f6a41cbfaca39e1fef3c88a2c3f24de3e91808 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Tue, 8 Aug 2023 11:04:28 -0400 Subject: [PATCH 456/686] Handle missing role. 
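A workflow step is not required to declare a role, and resolving the "$flowgroup" contact for such a step previously dereferenced a null Role. The guard added below falls back to the system EPerson so a curation failure is still reported to somebody. Roughly (condensed from the XmlWorkflowCuratorServiceImpl hunk in this patch; shown here only as a sketch):

    Role role = step.getRole();
    if (role != null) {
        RoleMembers roleMembers = role.getMembers(c, wfi);
        for (EPerson ep : roleMembers.getEPersons()) {
            epList.add(ep);
        }
        for (Group group : roleMembers.getGroups()) {
            epList.addAll(group.getMembers());
        }
    } else {
        // no role configured for this step: notify the system EPerson instead
        epList.add(ePersonService.getSystemEPerson(c));
    }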
(cherry picked from commit 2e62fa3fd1f264aac0bb4a12953b6385211e5656) --- .../curate/XmlWorkflowCuratorServiceImpl.java | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java index 97537befd2f4..70a36f278ed1 100644 --- a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java @@ -31,6 +31,7 @@ import org.dspace.workflow.FlowStep; import org.dspace.workflow.Task; import org.dspace.workflow.TaskSet; +import org.dspace.xmlworkflow.Role; import org.dspace.xmlworkflow.RoleMembers; import org.dspace.xmlworkflow.WorkflowConfigurationException; import org.dspace.xmlworkflow.factory.XmlWorkflowFactory; @@ -276,12 +277,17 @@ protected List resolveContacts(Context c, List contacts, String.valueOf(wfi.getID()), e); return epList; } - RoleMembers roleMembers = step.getRole().getMembers(c, wfi); - for (EPerson ep : roleMembers.getEPersons()) { - epList.add(ep); - } - for (Group group : roleMembers.getGroups()) { - epList.addAll(group.getMembers()); + Role role = step.getRole(); + if (null != role) { + RoleMembers roleMembers = role.getMembers(c, wfi); + for (EPerson ep : roleMembers.getEPersons()) { + epList.add(ep); + } + for (Group group : roleMembers.getGroups()) { + epList.addAll(group.getMembers()); + } + } else { + epList.add(ePersonService.getSystemEPerson(c)); } } else if ("$colladmin".equals(contact)) { // special literal for collection administrators From b36a535ae5797ea5700055da42974a6147c7fa38 Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Fri, 27 Oct 2023 12:40:34 +0200 Subject: [PATCH 457/686] [DSC-898] fix dc.date.accessioned during migration of CRIS entities --- .../dspace_cris_migration_post_import.kjb | 56 +- .../migration/update_dc_date_accessioned.ktr | 634 ++++++++++++++++++ 2 files changed, 686 insertions(+), 4 deletions(-) create mode 100644 dspace/etc/migration/update_dc_date_accessioned.ktr diff --git a/dspace/etc/migration/dspace_cris_migration_post_import.kjb b/dspace/etc/migration/dspace_cris_migration_post_import.kjb index b766c5d3723a..0c18927b5849 100644 --- a/dspace/etc/migration/dspace_cris_migration_post_import.kjb +++ b/dspace/etc/migration/dspace_cris_migration_post_import.kjb @@ -381,8 +381,8 @@ N Y 0 - 1448 - 112 + 1024 + 304 @@ -537,10 +537,49 @@ N Y 0 - 1290 + 1248 112 + + UPDATE dc_date_accessioned + + TRANS + + filename + + ${Internal.Entry.Current.Directory}/update_dc_date_accessioned.ktr + + N + N + N + N + N + N + + + N + N + Basic + N + + N + Y + N + N + N + Pentaho local + N + + Y + + N + Y + 1 + 1248 + 304 + + @@ -590,8 +629,17 @@ Migrate doi2item - Successo + UPDATE dc_date_accessioned 0 + 1 + Y + Y + N + + + UPDATE dc_date_accessioned + Successo + 1 0 Y Y diff --git a/dspace/etc/migration/update_dc_date_accessioned.ktr b/dspace/etc/migration/update_dc_date_accessioned.ktr new file mode 100644 index 000000000000..85be8c3c05b0 --- /dev/null +++ b/dspace/etc/migration/update_dc_date_accessioned.ktr @@ -0,0 +1,634 @@ + + + + udpate_dc_date_accessioned + + + + Normal + / + + + + + + + + + + + + ID_BATCH + Y + ID_BATCH + + + CHANNEL_ID + Y + CHANNEL_ID + + + TRANSNAME + Y + TRANSNAME + + + STATUS + Y + STATUS + + + LINES_READ + Y + LINES_READ + + + + LINES_WRITTEN + Y + LINES_WRITTEN + + + + LINES_UPDATED + Y + LINES_UPDATED + + + + LINES_INPUT + Y + LINES_INPUT + + 
+ + LINES_OUTPUT + Y + LINES_OUTPUT + + + + LINES_REJECTED + Y + LINES_REJECTED + + + + ERRORS + Y + ERRORS + + + STARTDATE + Y + STARTDATE + + + ENDDATE + Y + ENDDATE + + + LOGDATE + Y + LOGDATE + + + DEPDATE + Y + DEPDATE + + + REPLAYDATE + Y + REPLAYDATE + + + LOG_FIELD + Y + LOG_FIELD + + + EXECUTING_SERVER + N + EXECUTING_SERVER + + + EXECUTING_USER + N + EXECUTING_USER + + + CLIENT + N + CLIENT + + + + + +
    + + + + ID_BATCH + Y + ID_BATCH + + + SEQ_NR + Y + SEQ_NR + + + LOGDATE + Y + LOGDATE + + + TRANSNAME + Y + TRANSNAME + + + STEPNAME + Y + STEPNAME + + + STEP_COPY + Y + STEP_COPY + + + LINES_READ + Y + LINES_READ + + + LINES_WRITTEN + Y + LINES_WRITTEN + + + LINES_UPDATED + Y + LINES_UPDATED + + + LINES_INPUT + Y + LINES_INPUT + + + LINES_OUTPUT + Y + LINES_OUTPUT + + + LINES_REJECTED + Y + LINES_REJECTED + + + ERRORS + Y + ERRORS + + + INPUT_BUFFER_ROWS + Y + INPUT_BUFFER_ROWS + + + OUTPUT_BUFFER_ROWS + Y + OUTPUT_BUFFER_ROWS + + + + + +
    + + + ID_BATCH + Y + ID_BATCH + + + CHANNEL_ID + Y + CHANNEL_ID + + + LOG_DATE + Y + LOG_DATE + + + LOGGING_OBJECT_TYPE + Y + LOGGING_OBJECT_TYPE + + + OBJECT_NAME + Y + OBJECT_NAME + + + OBJECT_COPY + Y + OBJECT_COPY + + + REPOSITORY_DIRECTORY + Y + REPOSITORY_DIRECTORY + + + FILENAME + Y + FILENAME + + + OBJECT_ID + Y + OBJECT_ID + + + OBJECT_REVISION + Y + OBJECT_REVISION + + + PARENT_CHANNEL_ID + Y + PARENT_CHANNEL_ID + + + ROOT_CHANNEL_ID + Y + ROOT_CHANNEL_ID + + + + + +
    + + + ID_BATCH + Y + ID_BATCH + + + CHANNEL_ID + Y + CHANNEL_ID + + + LOG_DATE + Y + LOG_DATE + + + TRANSNAME + Y + TRANSNAME + + + STEPNAME + Y + STEPNAME + + + STEP_COPY + Y + STEP_COPY + + + LINES_READ + Y + LINES_READ + + + LINES_WRITTEN + Y + LINES_WRITTEN + + + LINES_UPDATED + Y + LINES_UPDATED + + + LINES_INPUT + Y + LINES_INPUT + + + LINES_OUTPUT + Y + LINES_OUTPUT + + + LINES_REJECTED + Y + LINES_REJECTED + + + ERRORS + Y + ERRORS + + + LOG_FIELD + N + LOG_FIELD + + + + + +
    + + + ID_BATCH + Y + ID_BATCH + + + CHANNEL_ID + Y + CHANNEL_ID + + + LOG_DATE + Y + LOG_DATE + + + METRICS_DATE + Y + METRICS_DATE + + + METRICS_CODE + Y + METRICS_CODE + + + METRICS_DESCRIPTION + Y + METRICS_DESCRIPTION + + + METRICS_SUBJECT + Y + METRICS_SUBJECT + + + METRICS_TYPE + Y + METRICS_TYPE + + + METRICS_VALUE + Y + METRICS_VALUE + + + + + +
    + + 0.0 + 0.0 + + 10000 + 50 + 50 + N + Y + 50000 + Y + + N + 1000 + 100 + + + + + + + + + - + 2021/07/23 16:30:09.987 + - + 2021/07/23 16:30:09.987 + H4sIAAAAAAAAAAMAAAAAAAAAAAA= + N + + + + + dspace + ${db_host_name} + POSTGRESQL + Native + ${db_name} + ${db_port_number} + ${db_username} + ${db_password} + + + + + + EXTRA_OPTION_POSTGRESQL.stringtype + unspecified + + + FORCE_IDENTIFIERS_TO_LOWERCASE + N + + + FORCE_IDENTIFIERS_TO_UPPERCASE + N + + + IS_CLUSTERED + N + + + PORT_NUMBER + ${db_port_number} + + + PRESERVE_RESERVED_WORD_CASE + Y + + + QUOTE_ALL_FIELDS + N + + + SUPPORTS_BOOLEAN_DATA_TYPE + Y + + + SUPPORTS_TIMESTAMP_DATA_TYPE + Y + + + USE_POOLING + N + + + + + + Generate rows + Execute SQL script + Y + + + + Execute SQL script + ExecSQL + + Y + + 1 + + none + + + dspace + N + Y + N + N +DO $$ +DECLARE + row_data record; + metadataFieldLegacyId INT; + metadataFieldDateId INT; + row_count integer := 0; +BEGIN + + SELECT m.metadata_field_id + INTO metadataFieldLegacyId + FROM metadatafieldregistry m JOIN metadataschemaregistry s + ON m.metadata_schema_id = s.metadata_schema_id + WHERE s.short_id = 'cris' AND m.element = 'legacyId'; + + SELECT m.metadata_field_id + INTO metadataFieldDateId + FROM metadatafieldregistry m JOIN metadataschemaregistry s + ON m.metadata_schema_id = s.metadata_schema_id + WHERE s.short_id = 'dc' AND m.element = 'date' AND m.qualifier = 'accessioned'; + + FOR row_data IN ( + SELECT m.dspace_object_id, d.timestampcreated + FROM old_cris_do d + JOIN metadatavalue m ON d.crisid = m.text_value + WHERE m.metadata_field_id = metadataFieldLegacyId + + UNION ALL + SELECT m.dspace_object_id, d.timestampcreated + FROM old_cris_rpage d + JOIN metadatavalue m ON d.crisid = m.text_value + WHERE m.metadata_field_id = metadataFieldLegacyId + + UNION ALL + SELECT m.dspace_object_id, d.timestampcreated + FROM old_cris_project d + JOIN metadatavalue m ON d.crisid = m.text_value + WHERE m.metadata_field_id = metadataFieldLegacyId + + UNION ALL + SELECT m.dspace_object_id, d.timestampcreated + FROM old_cris_orgunit d + JOIN metadatavalue m ON d.crisid = m.text_value + WHERE m.metadata_field_id = metadataFieldLegacyId + + ) + LOOP + UPDATE metadatavalue + SET text_value = TO_CHAR(row_data.timestampcreated, 'YYYY-MM-DD"T"HH24:MI:SS"Z"') + WHERE dspace_object_id = row_data.dspace_object_id + AND metadata_field_id = metadataFieldDateId; + + RAISE INFO 'Updating DSpace object with uuid %', row_data.dspace_object_id; + RAISE INFO 'New date is %', TO_CHAR(row_data.timestampcreated, 'YYYY-MM-DD"T"HH24:MI:SS"Z"'); + + row_count := row_count + 1; + END LOOP; + RAISE NOTICE 'Total items processed: %', row_count; +END $$; + + N + + + + + + + + + + + + + + + + 320 + 176 + Y + + + + Generate rows + RowGenerator + + Y + + 1 + + none + + + + + 1 + N + 5000 + now + FiveSecondsAgo + + + + + + + + + + 112 + 176 + Y + + + + + + + N + + From 35f3cf022a8264a57608fb05ef1ed0fc124b4f82 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Fri, 27 Oct 2023 16:33:21 +0200 Subject: [PATCH 458/686] [CST-12350] Fixes overrides metadata in RegistrationData for auth users --- .../dspace/app/rest/converter/RegistrationDataConverter.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RegistrationDataConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RegistrationDataConverter.java index 5b742366b582..3ec5bfbf533c 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RegistrationDataConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/RegistrationDataConverter.java @@ -66,6 +66,9 @@ public RegistrationRest convert(RegistrationData registrationData, Projection pr EPerson ePerson = null; try { ePerson = accountService.getEPerson(context, registrationData.getToken()); + if (ePerson == null && registrationData.getRegistrationType().equals(RegistrationTypeEnum.ORCID)) { + ePerson = context.getCurrentUser(); + } } catch (SQLException | AuthorizeException e) { throw new RuntimeException(e); } From 1bc6e1ff7124b0e1a394301abbb9dfa3a50b110a Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 13 Sep 2023 16:56:29 -0500 Subject: [PATCH 459/686] Fix "Site cannot be indexed" error by ignoring ADD/REMOVE events on Site object (cherry picked from commit ef7f02fe81bc570353c0bf6a43706c77909e30e3) --- .../main/java/org/dspace/discovery/IndexEventConsumer.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java index 4ff1f3134484..bf1c7da4e150 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java @@ -154,7 +154,11 @@ public void consume(Context ctx, Event event) throws Exception { case Event.REMOVE: case Event.ADD: - if (object == null) { + // At this time, ADD and REMOVE actions are ignored on SITE object. They are only triggered for + // top-level communities. No action is necessary as Community itself is indexed (or deleted) separately. + if (event.getSubjectType() == Constants.SITE) { + log.debug(event.getEventTypeAsString() + " event triggered for Site object. Skipping it."); + } else if (object == null) { log.warn(event.getEventTypeAsString() + " event, could not get object for " + event.getObjectTypeAsString() + " id=" + event.getObjectID() From e597c2afc1a56fb4d36ca15e1ada756e5d1a8c4d Mon Sep 17 00:00:00 2001 From: wwuck Date: Sat, 28 Oct 2023 00:32:54 +1100 Subject: [PATCH 460/686] Add a null check when assigning ldap groups Prevent NullReferenceException by checking if the group list is null Fixes #8920 (cherry picked from commit bb6498ed5e4696201d3e45bd377faa407dca277f) --- .../authenticate/LDAPAuthentication.java | 97 ++++++++++++------- 1 file changed, 60 insertions(+), 37 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java index afd82db863ba..aced16876db0 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java @@ -713,8 +713,8 @@ public String getName() { private void assignGroups(String dn, ArrayList group, Context context) { if (StringUtils.isNotBlank(dn)) { System.out.println("dn:" + dn); - int i = 1; - String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i); + int groupmapIndex = 1; + String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." 
+ groupmapIndex); boolean cmp; @@ -725,49 +725,72 @@ private void assignGroups(String dn, ArrayList group, Context context) { String ldapSearchString = t[0]; String dspaceGroupName = t[1]; - // list of strings with dn from LDAP groups - // inner loop - Iterator groupIterator = group.iterator(); - while (groupIterator.hasNext()) { + if (group == null) { + cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); - // save the current entry from iterator for further use - String currentGroup = groupIterator.next(); - - // very much the old code from DSpace <= 7.5 - if (currentGroup == null) { - cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); - } else { - cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString); + if (cmp) { + assignGroup(context, groupmapIndex, dspaceGroupName); } + } else { + // list of strings with dn from LDAP groups + // inner loop + Iterator groupIterator = group.iterator(); + while (groupIterator.hasNext()) { - if (cmp) { - // assign user to this group - try { - Group ldapGroup = groupService.findByName(context, dspaceGroupName); - if (ldapGroup != null) { - groupService.addMember(context, ldapGroup, context.getCurrentUser()); - groupService.update(context, ldapGroup); - } else { - // The group does not exist - log.warn(LogHelper.getHeader(context, - "ldap_assignGroupsBasedOnLdapDn", - "Group defined in authentication-ldap.login.groupmap." + i - + " does not exist :: " + dspaceGroupName)); - } - } catch (AuthorizeException ae) { - log.debug(LogHelper.getHeader(context, - "assignGroupsBasedOnLdapDn could not authorize addition to " + - "group", - dspaceGroupName)); - } catch (SQLException e) { - log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", - dspaceGroupName)); + // save the current entry from iterator for further use + String currentGroup = groupIterator.next(); + + // very much the old code from DSpace <= 7.5 + if (currentGroup == null) { + cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); + } else { + cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString); + } + + if (cmp) { + assignGroup(context, groupmapIndex, dspaceGroupName); } } } - groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++i); + groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++groupmapIndex); + } + } + } + + /** + * Add the current authenticated user to the specified group + * + * @param context + * DSpace context + * + * @param groupmapIndex + * authentication-ldap.login.groupmap.* key index defined in dspace.cfg + * + * @param dspaceGroupName + * The DSpace group to add the user to + */ + private void assignGroup(Context context, int groupmapIndex, String dspaceGroupName) { + try { + Group ldapGroup = groupService.findByName(context, dspaceGroupName); + if (ldapGroup != null) { + groupService.addMember(context, ldapGroup, context.getCurrentUser()); + groupService.update(context, ldapGroup); + } else { + // The group does not exist + log.warn(LogHelper.getHeader(context, + "ldap_assignGroupsBasedOnLdapDn", + "Group defined in authentication-ldap.login.groupmap." 
+ groupmapIndex + + " does not exist :: " + dspaceGroupName)); } + } catch (AuthorizeException ae) { + log.debug(LogHelper.getHeader(context, + "assignGroupsBasedOnLdapDn could not authorize addition to " + + "group", + dspaceGroupName)); + } catch (SQLException e) { + log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", + dspaceGroupName)); } } From b1639de11fa6908ea45e80725069257625311fee Mon Sep 17 00:00:00 2001 From: mohamed eskander Date: Mon, 30 Oct 2023 12:35:38 +0200 Subject: [PATCH 461/686] [DSC-1315] added avilibilty to use different cris layout configuration for the same entity type --- .../java/org/dspace/layout/CrisLayoutTab.java | 11 + .../dspace/layout/dao/CrisLayoutTabDAO.java | 13 +- .../layout/dao/impl/CrisLayoutTabDAOImpl.java | 27 +- .../impl/CrisLayoutToolParserImpl.java | 21 +- .../layout/service/CrisLayoutTabService.java | 9 +- .../impl/CrisLayoutTabServiceImpl.java | 73 ++++- .../h2/V7.6_2023.10.23__add_custom_filter.sql | 17 ++ .../V7.6_2023.10.23__add_custom_filter.sql | 17 ++ .../V7.6_2023.10.23__add_custom_filter.sql | 17 ++ ....28__update_cris_layout_tab_constraint.sql | 18 ++ .../dspace/builder/CrisLayoutTabBuilder.java | 5 + .../layout/script/CrisLayoutToolScriptIT.java | 4 +- .../converter/CrisLayoutTabConverter.java | 4 +- .../app/rest/model/CrisLayoutTabRest.java | 11 +- .../CrisLayoutTabRestRepository.java | 1 + .../layout/CrisLayoutTabRestRepositoryIT.java | 288 ++++++++++++++++++ dspace/config/dspace.cfg | 4 + 17 files changed, 516 insertions(+), 24 deletions(-) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.23__add_custom_filter.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.23__add_custom_filter.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.23__add_custom_filter.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql diff --git a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java index 4f938ca69aec..48bd0dc56112 100644 --- a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java @@ -79,6 +79,9 @@ public class CrisLayoutTab implements ReloadableEntity { @Column(name = "security") private Integer security; + @Column(name = "custom_filter") + private String customFilter; + @ManyToMany(fetch = FetchType.LAZY) @JoinTable(name = "cris_layout_tab2securitymetadata", joinColumns = { @JoinColumn(name = "tab_id") }, inverseJoinColumns = { @JoinColumn(name = "metadata_field_id") }) @@ -180,6 +183,14 @@ public void setSecurity(Integer security) { this.security = security; } + public String getCustomFilter() { + return customFilter; + } + + public void setCustomFilter(String customFilter) { + this.customFilter = customFilter; + } + public Set getMetadataSecurityFields() { return metadataSecurityFields; } diff --git a/dspace-api/src/main/java/org/dspace/layout/dao/CrisLayoutTabDAO.java b/dspace-api/src/main/java/org/dspace/layout/dao/CrisLayoutTabDAO.java index 195b53c06f36..944e7c34715e 100644 --- a/dspace-api/src/main/java/org/dspace/layout/dao/CrisLayoutTabDAO.java +++ b/dspace-api/src/main/java/org/dspace/layout/dao/CrisLayoutTabDAO.java @@ -65,6 +65,17 @@ public interface CrisLayoutTabDAO extends 
GenericDAO { public List findByEntityTypeAndEagerlyFetchBoxes(Context context, String entityType) throws SQLException; + /** + * Returns all tabs in database filtered by entity type {@link EntityType} + * @param context The relevant DSpace Context + * @param entityType entity type label {@link EntityType} + * @param customFilter specialized entity type label {@link CrisLayoutTab#getCustomFilter()} + * @return List of CrisLayoutTab {@link CrisLayoutTab} + * @throws SQLException An exception that provides information on a database errors. + */ + public List findByEntityTypeAndEagerlyFetchBoxes(Context context, + String entityType, String customFilter) throws SQLException; + /** * Returns all tabs in database filtered by entity type {@link EntityType} * @param context The relevant DSpace Context @@ -75,7 +86,7 @@ public List findByEntityTypeAndEagerlyFetchBoxes(Context context, * @throws SQLException An exception that provides information on a database errors. */ public List findByEntityTypeAndEagerlyFetchBoxes( - Context context, String entityType, Integer limit, Integer offset) throws SQLException; + Context context, String entityType, String customFilter, Integer limit, Integer offset) throws SQLException; /** * Returns the total number of metadata field associated at tab diff --git a/dspace-api/src/main/java/org/dspace/layout/dao/impl/CrisLayoutTabDAOImpl.java b/dspace-api/src/main/java/org/dspace/layout/dao/impl/CrisLayoutTabDAOImpl.java index f1d34a3f74f5..bd898d8e1b40 100644 --- a/dspace-api/src/main/java/org/dspace/layout/dao/impl/CrisLayoutTabDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/dao/impl/CrisLayoutTabDAOImpl.java @@ -10,14 +10,17 @@ import static org.dspace.layout.CrisLayoutTab.ROWS_AND_CONTENT_GRAPH; import java.sql.SQLException; +import java.util.ArrayList; import java.util.List; import javax.persistence.EntityGraph; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Join; +import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; +import org.apache.commons.lang3.StringUtils; import org.dspace.content.EntityType; import org.dspace.content.EntityType_; import org.dspace.content.MetadataField; @@ -62,24 +65,42 @@ public Long countTotal(Context context) throws SQLException { return getHibernateSession(context).createQuery(cc).getSingleResult(); } + /* (non-Javadoc) + * @see org.dspace.layout.dao.CrisLayoutTabDAO#findByEntityType(java.lang.String) + */ + @Override + public List findByEntityTypeAndEagerlyFetchBoxes(Context context, + String entityType, String customFilter) throws SQLException { + return findByEntityTypeAndEagerlyFetchBoxes(context, entityType, customFilter, null, null); + } + /* (non-Javadoc) * @see org.dspace.layout.dao.CrisLayoutTabDAO#findByEntityType(java.lang.String) */ @Override public List findByEntityTypeAndEagerlyFetchBoxes(Context context, String entityType) throws SQLException { - return findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null, null); + return findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null, null, null); } @Override public List findByEntityTypeAndEagerlyFetchBoxes(Context context, String entityType, - Integer limit, Integer offset) throws SQLException { + String customFilter, Integer limit, Integer offset) throws SQLException { CriteriaBuilder cb = getCriteriaBuilder(context); CriteriaQuery query = cb.createQuery(CrisLayoutTab.class); Root tabRoot = 
query.from(CrisLayoutTab.class); + List andPredicates = new ArrayList<>(); + + andPredicates.add(cb.equal(tabRoot.get(CrisLayoutTab_.entity).get(EntityType_.LABEL), entityType)); + if (StringUtils.isNotBlank(customFilter)) { + andPredicates.add(cb.equal(tabRoot.get(CrisLayoutTab_.CUSTOM_FILTER), customFilter)); + } else { + andPredicates.add(cb.isNull((tabRoot.get(CrisLayoutTab_.CUSTOM_FILTER)))); + } + query - .where(cb.equal(tabRoot.get(CrisLayoutTab_.entity).get(EntityType_.LABEL), entityType)) + .where(andPredicates.toArray(new Predicate[] {})) .orderBy(cb.asc(tabRoot.get(CrisLayoutTab_.PRIORITY))); TypedQuery typedQuery = getHibernateSession(context).createQuery(query); diff --git a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java index e8ace7e91008..9c368c2785b2 100644 --- a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java @@ -120,9 +120,14 @@ private CrisLayoutTab buildTab(Context context, Row tabRow) { Workbook workbook = tabRow.getSheet().getWorkbook(); String name = getCellValue(tabRow, SHORTNAME_COLUMN); - String entityType = getCellValue(tabRow, ENTITY_COLUMN); + String entityColumn = getCellValue(tabRow, ENTITY_COLUMN); + + int index = entityColumn.indexOf("."); + String customFilter = (index > 0 && index < entityColumn.length()) ? entityColumn.substring(index + 1) : null; + String entityType = (index > 0) ? entityColumn.substring(0, index) : entityColumn; tab.setEntity(getEntityType(context, entityType)); + tab.setCustomFilter(customFilter); tab.setShortName(name); tab.setHeader(getCellValue(tabRow, LABEL_COLUMN)); tab.setLeading(toBoolean(getCellValue(tabRow, LEADING_COLUMN))); @@ -172,7 +177,7 @@ private CrisLayoutCell buildCell(Context context, Row tab2boxRow) { private List buildBoxes(Context context, Row tab2boxRow) { - String entityType = getCellValue(tab2boxRow, ENTITY_COLUMN); + String entityType = getEntityValue(tab2boxRow, ENTITY_COLUMN); String boxes = getCellValue(tab2boxRow, BOXES_COLUMN); if (StringUtils.isBlank(boxes)) { @@ -281,7 +286,7 @@ private CrisLayoutField buildMetadataGroupField(Context context, Row row) { private List buildCrisMetadataGroups(Context context, Row row) { String metadataField = getCellValue(row, METADATA_COLUMN); - String entity = getCellValue(row, ENTITY_COLUMN); + String entity = getEntityValue(row, ENTITY_COLUMN); Sheet metadatagroupsSheet = getSheetByName(row.getSheet().getWorkbook(), METADATAGROUPS_SHEET); @@ -374,7 +379,7 @@ private Set buildGroupSecurityField(Context context, Workbook workbook, private Stream getRowsByEntityAndColumnValue(Sheet sheet, String entity, String columnName, String value) { return WorkbookUtils.getNotEmptyRowsSkippingHeader(sheet).stream() .filter(row -> value.equals(getCellValue(row, columnName))) - .filter(row -> entity.equals(getCellValue(row, ENTITY_COLUMN))); + .filter(row -> entity.equals(getEntityValue(row, ENTITY_COLUMN))); } private boolean toBoolean(String value) { @@ -389,6 +394,14 @@ private Integer toInteger(String value) { } } + private String getEntityValue(Row row, String header) { + String cellValue = WorkbookUtils.getCellValue(row, header); + return Optional.ofNullable(cellValue) + .filter(cell -> cell.contains(".")) + .map(cell -> cell.split("\\.")[0]) + .orElse(StringUtils.isNotBlank(cellValue) ? 
cellValue : null); + } + private String getCellValue(Row row, String header) { String cellValue = WorkbookUtils.getCellValue(row, header); return StringUtils.isNotBlank(cellValue) ? cellValue : null; diff --git a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java index dc1980e7894b..7224b8f26302 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java @@ -94,22 +94,25 @@ public CrisLayoutTab create(Context context, EntityType eType, Integer priority) * Returns all tabs in the database filtered by entity type {@link EntityType} * @param context The relevant DSpace Context * @param entityType label of entity type {@link EntityType} + * @param customFilter label of specialized entity type {@link CrisLayoutTab#getCustomFilter()} * @return List of CrisLayoutTab {@link CrisLayoutTab} * @throws SQLException An exception that provides information on a database errors. */ - public List findByEntityType(Context context, String entityType) throws SQLException; + public List findByEntityType(Context context, String entityType, String customFilter) + throws SQLException; /** * Returns all tabs in database filtered by entity type {@link EntityType} * @param context The relevant DSpace Context * @param entityType entity type label + * @param customFilter label of specialized entity type {@link CrisLayoutTab#getCustomFilter()} * @param limit how many results return * @param offset the position of the first result to return * @return List of CrisLayoutTab {@link CrisLayoutTab} * @throws SQLException An exception that provides information on a database errors. 
*/ - public List findByEntityType(Context context, String entityType, Integer limit, Integer offset) - throws SQLException; + public List findByEntityType(Context context, String entityType, String customFilter, Integer limit, + Integer offset) throws SQLException; /** * Returns the total number of tabs with a specific entity type diff --git a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java index 0758853d4dd2..bd4cddcdf260 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java @@ -7,13 +7,20 @@ */ package org.dspace.layout.service.impl; +import static org.dspace.util.FunctionalUtils.throwingMapperWrapper; + import java.sql.SQLException; import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.UUID; +import javax.annotation.PostConstruct; import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.util.SubmissionConfigReader; +import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.EntityType; @@ -24,6 +31,7 @@ import org.dspace.layout.CrisLayoutTab; import org.dspace.layout.dao.CrisLayoutTabDAO; import org.dspace.layout.service.CrisLayoutTabService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; /** @@ -42,6 +50,16 @@ public class CrisLayoutTabServiceImpl implements CrisLayoutTabService { @Autowired private ItemService itemService; + @Autowired + private ConfigurationService configurationService; + + private SubmissionConfigReader submissionConfigReader; + + @PostConstruct + private void setup() throws SubmissionConfigReaderException { + submissionConfigReader = new SubmissionConfigReader(); + } + @Override public CrisLayoutTab create(Context c, CrisLayoutTab tab) throws SQLException, AuthorizeException { if (!authorizeService.isAdmin(c)) { @@ -135,8 +153,9 @@ public Long countTotal(Context context) throws SQLException { * @see org.dspace.layout.service.CrisLayoutTabService#findByEntityType(org.dspace.core.Context, java.lang.String) */ @Override - public List findByEntityType(Context context, String entityType) throws SQLException { - return dao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType); + public List findByEntityType(Context context, String entityType, String customFilter) + throws SQLException { + return dao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, customFilter); } /* (non-Javadoc) @@ -144,9 +163,9 @@ public List findByEntityType(Context context, String entityType) * (org.dspace.core.Context, java.lang.String, java.lang.Integer, java.lang.Integer) */ @Override - public List findByEntityType(Context context, String entityType, Integer limit, Integer offset) - throws SQLException { - return dao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, limit, offset); + public List findByEntityType(Context context, String entityType, String customFilter, Integer limit, + Integer offset) throws SQLException { + return dao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, customFilter, limit, offset); } /* (non-Javadoc) @@ -171,12 +190,48 @@ public Long 
totalMetadataField(Context context, Integer tabId) throws SQLExcepti @Override public List findByItem(Context context, String itemUuid) throws SQLException { Item item = Objects.requireNonNull(itemService.find(context, UUID.fromString(itemUuid)), - "The itemUuid entered does not match with any item"); - String entityType = itemService.getMetadata(item, "dspace.entity.type"); - if (entityType == null) { + "The itemUuid entered does not match with any item"); + + String entityTypeValue = itemService.getMetadata(item, "dspace.entity.type"); + String submissionName = getSubmissionDefinitionName(item); + + List layoutTabs = + Optional.ofNullable(this.configurationService.getProperty("dspace.metadata.layout.tab")) + .map(metadataField -> this.itemService.getMetadataByMetadataString(item, metadataField)) + .filter(metadatas -> !metadatas.isEmpty()) + .map(metadatas -> metadatas.get(0)) + .map(metadata -> + findValidEntityType(context, entityTypeValue, submissionName + "." + + metadata.getAuthority()) + .orElse( + findValidEntityType(context, entityTypeValue, submissionName + "." + + metadata.getValue()) + .orElse(findValidEntityType(context, entityTypeValue, metadata.getAuthority()) + .orElse(findValidEntityType(context, entityTypeValue, metadata.getValue()) + .orElse(null)))) + ) + .orElse(findValidEntityType(context, entityTypeValue, submissionName) + .orElse(findByEntityType(context, entityTypeValue, null))); + if (layoutTabs == null) { return Collections.emptyList(); } - return findByEntityType(context, entityType); + return layoutTabs; + } + + private String getSubmissionDefinitionName(Item item) { + return submissionConfigReader.getSubmissionConfigByCollection(item.getOwningCollection()).getSubmissionName(); + } + + private Optional> findValidEntityType(Context context, String entityTypeValue, + String customFilter) { + return Optional.ofNullable(customFilter) + .map( + throwingMapperWrapper( + value -> findByEntityType(context, entityTypeValue, value), + null + ) + ) + .filter(tabs -> tabs != null && !tabs.isEmpty()); } } diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.23__add_custom_filter.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.23__add_custom_filter.sql new file mode 100644 index 000000000000..369bd14f7064 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.23__add_custom_filter.sql @@ -0,0 +1,17 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
+-- http://flywaydb.org/ +-- =============================================================== + +ALTER TABLE cris_layout_tab ADD IF NOT EXISTS custom_filter varchar(255); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.23__add_custom_filter.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.23__add_custom_filter.sql new file mode 100644 index 000000000000..369bd14f7064 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.23__add_custom_filter.sql @@ -0,0 +1,17 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. +-- http://flywaydb.org/ +-- =============================================================== + +ALTER TABLE cris_layout_tab ADD IF NOT EXISTS custom_filter varchar(255); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.23__add_custom_filter.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.23__add_custom_filter.sql new file mode 100644 index 000000000000..369bd14f7064 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.23__add_custom_filter.sql @@ -0,0 +1,17 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. +-- http://flywaydb.org/ +-- =============================================================== + +ALTER TABLE cris_layout_tab ADD IF NOT EXISTS custom_filter varchar(255); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql new file mode 100644 index 000000000000..6ea435bfeed2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- -- +-- Remove unique constraint on entity_id and shortname of table cris_layout_tab. 
+-- Now the entity_id and shortname aren't unique because entity_type can have custom_filter in it +-- -- +ALTER TABLE cris_layout_tab DROP CONSTRAINT cris_layout_tab_entity_shortname_unique; + +-- -- +-- +-- -- +ALTER TABLE cris_layout_tab ADD CONSTRAINT cris_layout_tab_entity_shortname_custom_filter_unique UNIQUE(entity_id, shortname, custom_filter); \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java index 70abf7b8ae9d..312d04bb4ada 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java @@ -117,6 +117,11 @@ public CrisLayoutTabBuilder withShortName(String shortName) { return this; } + public CrisLayoutTabBuilder withCustomFilter(String customFilter) { + this.tab.setCustomFilter(customFilter); + return this; + } + public CrisLayoutTabBuilder withHeader(String header) { this.tab.setHeader(header); return this; diff --git a/dspace-api/src/test/java/org/dspace/layout/script/CrisLayoutToolScriptIT.java b/dspace-api/src/test/java/org/dspace/layout/script/CrisLayoutToolScriptIT.java index e2c264386a64..86786fa9f256 100644 --- a/dspace-api/src/test/java/org/dspace/layout/script/CrisLayoutToolScriptIT.java +++ b/dspace-api/src/test/java/org/dspace/layout/script/CrisLayoutToolScriptIT.java @@ -274,7 +274,7 @@ public void testWithValidLayout() throws InstantiationException, IllegalAccessEx assertThat(tabService.findAll(context), hasSize(3)); - List personTabs = tabService.findByEntityType(context, "Person"); + List personTabs = tabService.findByEntityType(context, "Person", null); assertThat(personTabs, hasSize(2)); CrisLayoutTab firstPersonTab = personTabs.get(0); @@ -369,7 +369,7 @@ public void testWithValidLayout() throws InstantiationException, IllegalAccessEx assertThat(profileResearchoutputsBox.getGroupSecurityFields(), contains(matches(groupField -> groupField.getName().equals("Researchers")))); - List publicationTabs = tabService.findByEntityType(context, "Publication"); + List publicationTabs = tabService.findByEntityType(context, "Publication", null); assertThat(publicationTabs, hasSize(1)); CrisLayoutTab publicationTab = publicationTabs.get(0); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java index d35a80b341a8..d15a04cf0108 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java @@ -39,7 +39,7 @@ /** * This is the converter from Entity CrisLayoutTab to the REST data model - * + * * @author Danilo Di Nuzzo (danilo.dinuzzo at 4science.it) * */ @@ -66,6 +66,7 @@ public CrisLayoutTabRest convert(CrisLayoutTab model, Projection projection) { CrisLayoutTabRest rest = new CrisLayoutTabRest(); rest.setId(model.getID()); rest.setEntityType(model.getEntity().getLabel()); + rest.setCustomFilter(model.getCustomFilter()); rest.setShortname(model.getShortName()); rest.setHeader(model.getHeader()); rest.setPriority(model.getPriority()); @@ -87,6 +88,7 @@ public CrisLayoutTab toModel(Context context, CrisLayoutTabRest rest) { tab.setSecurity(LayoutSecurity.valueOf(rest.getSecurity())); tab.setShortName(rest.getShortname()); tab.setEntity(findEntityType(context, rest)); + 
tab.setCustomFilter(rest.getCustomFilter()); tab.setLeading(rest.isLeading()); rest.getRows().forEach(row -> tab.addRow(toRowModel(context, row))); return tab; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CrisLayoutTabRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CrisLayoutTabRest.java index 72803cd751b8..9bebef4f9c33 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CrisLayoutTabRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/CrisLayoutTabRest.java @@ -18,7 +18,7 @@ /** * The CrisLayoutTab REST Resource - * + * * @author Danilo Di Nuzzo (danilo.dinuzzo at 4science.it) * */ @@ -39,6 +39,7 @@ public class CrisLayoutTabRest extends BaseObjectRest { private String shortname; private String header; private String entityType; + private String customFilter; private Integer priority; private Integer security; private Boolean leading; @@ -102,6 +103,14 @@ public void setEntityType(String entityType) { this.entityType = entityType; } + public String getCustomFilter() { + return customFilter; + } + + public void setCustomFilter(String customFilter) { + this.customFilter = customFilter; + } + public Integer getPriority() { return priority; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CrisLayoutTabRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CrisLayoutTabRestRepository.java index 301ada1b1890..f0017576958a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CrisLayoutTabRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CrisLayoutTabRestRepository.java @@ -106,6 +106,7 @@ public Page findByEntityType( tabList = service.findByEntityType( context, type, + null, pageable.getPageSize(), (pageable.getPageNumber() * pageable.getPageSize()) ); } catch (SQLException e) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java index 80ed84453088..a5ecfd92fdbe 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java @@ -63,6 +63,7 @@ import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; +import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataSchemaService; @@ -634,6 +635,293 @@ public void findByItem() throws Exception { .andExpect(jsonPath("$._embedded.tabs[1].rows[0].cells[0].boxes", contains(matchBox(box)))); } + /** + * Test for endpoint /api/layout/tabs/search/findByItem?uuid= + * The tabs are sorted by priority ascending. This are filtered based on the permission of the + * current user and available data. + * The expected result is a list of tabs derived from the item type, where the item type is: + *
    + * <ul>
    + * <li> submissionName.Authority of metadata configured in property {@code dspace.metadata.layout.tab}</li>
    + * <li> If null, submissionName.value of that metadata</li>
    + * <li> if null, Authority of metadata configured in property {@code dspace.metadata.layout.tab}</li>
    + * <li> If null, value of that metadata</li>
    + * <li> if null, submission name of item</li>
    + * <li> If null, value of entity type (metadata {@code dspace.entity.type})</li>
    + * <li> Otherwise, null</li>
    + * </ul>
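    The resolution order above corresponds to the nested Optional/orElse chain added to CrisLayoutTabServiceImpl.findByItem earlier in this patch. As a reading aid, the same behaviour can be sketched as a plain fallback loop that reuses the service's own findByEntityType; this is an illustration (assuming the metadata configured in dspace.metadata.layout.tab is present on the item), not code contained in the patch:

        // Simplified equivalent of the fallback chain (illustrative only)
        private List<CrisLayoutTab> resolveTabs(Context context, String entityTypeValue,
                                                String submissionName, MetadataValue metadata) throws SQLException {
            String[] candidates = {
                submissionName + "." + metadata.getAuthority(), // e.g. "patent.publication-coar-types:c_2f33"
                submissionName + "." + metadata.getValue(),     // e.g. "traditional.Resource Types::text::book"
                metadata.getAuthority(),
                metadata.getValue(),
                submissionName                                  // e.g. "publication"
            };
            for (String filter : candidates) {
                List<CrisLayoutTab> tabs = findByEntityType(context, entityTypeValue, filter);
                if (tabs != null && !tabs.isEmpty()) {
                    return tabs; // first custom filter with at least one configured tab wins
                }
            }
            // no custom filter matched: fall back to the plain entity-type lookup
            return findByEntityType(context, entityTypeValue, null);
        }

    For example, the item created below in the "patent" collection carries the authority "publication-coar-types:c_2f33" on dc.type, so it resolves via the "patent.publication-coar-types:c_2f33" candidate, which is what the final assertion of this test verifies.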
    + * @throws Exception + */ + @Test + public void findByItemMetadata() throws Exception { + context.turnOffAuthorisationSystem(); + + // Create new community + Community community = CommunityBuilder.createCommunity(context) + .withName("Test Community") + .withTitle("Title test community") + .build(); + // Create new collection + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Test Collection") + .withSubmissionDefinition("publication") + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, community) + .withName("Test Collection two") + .withSubmissionDefinition("traditional") + .build(); + + Collection collectionThree = CollectionBuilder.createCollection(context, community) + .withName("Test Collection two") + .withSubmissionDefinition("patent") + .build(); + + // Create entity Type + EntityType publicationType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType journalType = EntityTypeBuilder.createEntityTypeBuilder(context, "Journal").build(); + EntityType patentType = EntityTypeBuilder.createEntityTypeBuilder(context, "Patent").build(); + EntityType eTypePer = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + EntityType eTypeCollection = EntityTypeBuilder.createEntityTypeBuilder(context, "Collection").build(); + MetadataSchema schema = mdss.find(context, MetadataSchemaEnum.DC.getName()); + MetadataField title = mfss.findByElement(context, schema, "title", null); + + // Create new items + // first uses metadata type authority and submission as custom filter + String authority = "publication-coar-types:c_2f33"; + String metadataValue = "Resource Types::text::book"; + String submissionNameMetadataValue = "traditional." + metadataValue; + String submissionNameAuthority = "patent." 
+ authority; + + Item itemPublicationAuthority = ItemBuilder.createItem(context, collection) + .withTitle("TITLE") + .withType(metadataValue, authority) + .withEntityType(publicationType.getLabel()) + .build(); + // second uses ametadata type value as custom filter + Item itemPublicationValue = ItemBuilder.createItem(context, collection) + .withTitle("TITLE 1") + .withType(metadataValue) + .withEntityType(publicationType.getLabel()) + .build(); + // third uses entity type value as custom filter + Item itemPublication = ItemBuilder.createItem(context, collection) + .withTitle("TITLE 2") + .withEntityType(publicationType.getLabel()) + .build(); + // fourth uses submission name as custom filter + Item itemPublicationSubmission = ItemBuilder.createItem(context, collection) + .withTitle("TITLE 3") + .withType("type value") + .withEntityType(publicationType.getLabel()) + .build(); + // fifth uses submissionName.metadataValue as custom filter + Item itemPublicationSubmissionMetadata = ItemBuilder.createItem(context, collectionTwo) + .withTitle("TITLE 4") + .withType(metadataValue) + .withEntityType(journalType.getLabel()) + .build(); + + // sixth uses submissionName.authority as custom filter + Item itemPublicationSubmissionAuthority = ItemBuilder.createItem(context, collectionThree) + .withTitle("TITLE 5") + .withType(metadataValue, authority) + .withEntityType(patentType.getLabel()) + .build(); + + + // Create tabs for Publication Entity + CrisLayoutField field = CrisLayoutFieldBuilder.createMetadataField(context, title, 0, 1) + .withLabel("TITLE") + .withRendering("TEXT") + //.withBox(boxOne) + .build(); + CrisLayoutBox boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + CrisLayoutTab tabAuthority = CrisLayoutTabBuilder.createTab(context, publicationType, 0) + .withShortName("TabOne For Publication - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter(authority) + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + // Test + getClient() + .perform( + get("/api/layout/tabs/search/findByItem") + .param("uuid",itemPublicationAuthority.getID().toString()) + ) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tabAuthority)))); + + context.turnOffAuthorisationSystem(); + + boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + CrisLayoutTab tabPublicationValue = CrisLayoutTabBuilder.createTab(context, publicationType, 0) + .withShortName("TabOne For Collection - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter(metadataValue) + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + getClient() + .perform( + get("/api/layout/tabs/search/findByItem") + .param("uuid",itemPublicationValue.getID().toString()) + ) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect(jsonPath("$._embedded.tabs", contains(matchTab(tabPublicationValue)))); + + context.turnOffAuthorisationSystem(); + + 
boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + CrisLayoutTab tabPublication = CrisLayoutTabBuilder.createTab(context, publicationType, 0) + .withShortName("TabOne For Person - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter(null) + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem").param("uuid", itemPublication.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect( + jsonPath( + "$._embedded.tabs", + contains( + matchTab(tabPublication) + ) + ) + ); + + context.turnOffAuthorisationSystem(); + + boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + + CrisLayoutTab tabSubmissionName = CrisLayoutTabBuilder.createTab(context, publicationType, 0) + .withShortName("TabOne For Submission - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter("publication") + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", itemPublicationSubmission.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect( + jsonPath( + "$._embedded.tabs", + contains( + matchTab(tabSubmissionName) + ) + ) + ); + + context.turnOffAuthorisationSystem(); + + boxOne = CrisLayoutBoxBuilder.createBuilder(context, publicationType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + + CrisLayoutTab tabSubmissionNameMetadata = + CrisLayoutTabBuilder.createTab(context, journalType, 0) + .withShortName("TabOne For Submission metadata value - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter(submissionNameMetadataValue) + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", itemPublicationSubmissionMetadata.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect( + jsonPath( + "$._embedded.tabs", + contains( + matchTab(tabSubmissionNameMetadata) + ) + ) + ); + + context.turnOffAuthorisationSystem(); + + boxOne = CrisLayoutBoxBuilder.createBuilder(context, patentType, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .addField(field) + .build(); + + CrisLayoutTab tabSubmissionNameAuthority = + CrisLayoutTabBuilder.createTab(context, patentType, 0) + .withShortName("TabOne For Submission authority - priority 0") + .withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withCustomFilter(submissionNameAuthority) + .addBoxIntoNewRow(boxOne) + .build(); + + context.restoreAuthSystemState(); + + 
getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", itemPublicationSubmissionAuthority.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))) + .andExpect( + jsonPath( + "$._embedded.tabs", + contains( + matchTab(tabSubmissionNameAuthority) + ) + ) + ); + } + /** * Test for the altering which happens at endpoint /api/layout/tabs/search/findByItem?uuid= * The configuration of CrisLayoutBoxRest: boxType=METRICS, is altered by inner joining the CrisLayoutBoxRest diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index d78479c2f23d..71c3c2c987c7 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1917,6 +1917,10 @@ module_dir = modules vocabulary.plugin.publication-coar-types.hierarchy.preloadLevel = 2 vocabulary.plugin.patent-coar-types.hierarchy.preloadLevel = 2 +# Custom metadata used for layout loading +# TAB instances +dspace.metadata.layout.tab = dc.type + # Bulk export limitations: 0 means bulk export not allowed, -1 means no limitations # admin users (including community & collection admin) bulk-export.limit.admin = -1 From 5596cc686d06076b4d599c39889de7262af15962 Mon Sep 17 00:00:00 2001 From: Christian Bethge Date: Tue, 29 Aug 2023 15:28:21 +0200 Subject: [PATCH 462/686] 9043 use Templates for compiled XSLT instead of Transformer - use Templates are thread-safe and NOT Transformer (cherry picked from commit 1160341cb2a2c163c8fddc04ddab46de9041e1b8) --- dspace-oai/pom.xml | 2 +- .../services/impl/resources/DSpaceResourceResolver.java | 7 +++---- .../dspace/xoai/tests/integration/xoai/PipelineTest.java | 2 +- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 08e732d45733..37ff1f1b4b87 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -15,7 +15,7 @@ ${basedir}/.. 
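For context on this commit's switch from Transformer to Templates: javax.xml.transform.Templates is the compiled, immutable representation of a stylesheet and is documented as thread-safe, whereas a Transformer instance must not be used by multiple threads concurrently. The intended pattern is to compile and cache the Templates once and create a cheap, short-lived Transformer per transformation. A minimal sketch of that standard JAXP pattern (illustrative only, not code from this patch; the stylesheet path is a placeholder):

    import javax.xml.transform.Templates;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.stream.StreamResult;
    import javax.xml.transform.stream.StreamSource;

    public class XsltExample {
        // Compile once; Templates is safe to share across threads.
        private static final Templates TEMPLATES = compile();

        private static Templates compile() {
            try {
                return TransformerFactory.newInstance()
                        .newTemplates(new StreamSource("oai_dc.xsl")); // placeholder path
            } catch (Exception e) {
                throw new IllegalStateException(e);
            }
        }

        public static void transform(StreamSource in, StreamResult out) throws Exception {
            // Each caller gets its own short-lived Transformer.
            Transformer transformer = TEMPLATES.newTransformer();
            transformer.transform(in, out);
        }
    }

Callers of the renamed DSpaceResourceResolver.getTemplates(...) shown below are therefore expected to call newTransformer() on the returned Templates for each transformation.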
- 3.3.0 + 3.3.1-SNAPSHOT 5.87.0.RELEASE diff --git a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java index e67e9c56bd7a..83c4486f7134 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java @@ -12,7 +12,7 @@ import java.io.IOException; import java.io.InputStream; import javax.xml.transform.Source; -import javax.xml.transform.Transformer; +import javax.xml.transform.Templates; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.stream.StreamSource; @@ -40,8 +40,7 @@ public InputStream getResource(String path) throws IOException { } @Override - public Transformer getTransformer(String path) throws IOException, - TransformerConfigurationException { + public Templates getTemplates(String path) throws IOException, TransformerConfigurationException { // construct a Source that reads from an InputStream Source mySrc = new StreamSource(getResource(path)); // specify a system ID (the path to the XSLT-file on the filesystem) @@ -49,6 +48,6 @@ public Transformer getTransformer(String path) throws IOException, // XSLT-files (like ) String systemId = basePath + "/" + path; mySrc.setSystemId(systemId); - return transformerFactory.newTransformer(mySrc); + return transformerFactory.newTemplates(mySrc); } } diff --git a/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java b/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java index de76c992458c..0f48824159c2 100644 --- a/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java +++ b/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java @@ -29,7 +29,7 @@ public void pipelineTest() throws Exception { InputStream input = PipelineTest.class.getClassLoader().getResourceAsStream("item.xml"); InputStream xslt = PipelineTest.class.getClassLoader().getResourceAsStream("oai_dc.xsl"); String output = FileUtils.readAllText(new XSLPipeline(input, true) - .apply(factory.newTransformer(new StreamSource(xslt))) + .apply(factory.newTemplates(new StreamSource(xslt))) .getTransformed()); assertThat(output, oai_dc().withXPath("/oai_dc:dc/dc:title", equalTo("Teste"))); From 448387ccfdb986386df1fe0dec96170696cd9068 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 30 Oct 2023 15:13:39 -0500 Subject: [PATCH 463/686] Update to newly released XOAI 3.4.0 (cherry picked from commit 160ebbd791c0545db6516403da40cb191a2c8b99) --- dspace-oai/pom.xml | 39 ++++----------------------------------- 1 file changed, 4 insertions(+), 35 deletions(-) diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 37ff1f1b4b87..75452f9b904e 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -15,7 +15,7 @@ ${basedir}/.. 
- 3.3.1-SNAPSHOT + 3.4.0 5.87.0.RELEASE @@ -55,41 +55,10 @@ xoai ${xoai.version} + - org.hamcrest - hamcrest-all - - - - org.mockito - mockito-all - - - org.apache.commons - commons-lang3 - - - log4j - log4j - - - org.slf4j - slf4j-log4j12 - - - - org.codehaus.woodstox - wstx-asl - - - - org.dom4j - dom4j - - - - com.lyncode - test-support + com.fasterxml.woodstox + woodstox-core From 08f361a9cbcd13c5073b173048d47c17d2993f00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Tue, 3 Oct 2023 16:52:15 +0100 Subject: [PATCH 464/686] add test and fix (cherry picked from commit 48b0b71c6301b6eb46c387c47b71d0729cc2f889) --- .../main/java/org/dspace/content/Bundle.java | 2 +- .../java/org/dspace/content/BundleTest.java | 32 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/content/Bundle.java b/dspace-api/src/main/java/org/dspace/content/Bundle.java index 6c62c3dc9139..e5cbdb6ff244 100644 --- a/dspace-api/src/main/java/org/dspace/content/Bundle.java +++ b/dspace-api/src/main/java/org/dspace/content/Bundle.java @@ -126,7 +126,7 @@ public void setPrimaryBitstreamID(Bitstream bitstream) { * Unset the primary bitstream ID of the bundle */ public void unsetPrimaryBitstreamID() { - primaryBitstream = null; + setPrimaryBitstreamID(null); } /** diff --git a/dspace-api/src/test/java/org/dspace/content/BundleTest.java b/dspace-api/src/test/java/org/dspace/content/BundleTest.java index 4ff35f5b4df8..13b943b4d6b4 100644 --- a/dspace-api/src/test/java/org/dspace/content/BundleTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BundleTest.java @@ -513,6 +513,38 @@ public void testRemoveBitstreamAuth() throws SQLException, AuthorizeException, I } + /** + * Test removeBitstream method and also the unsetPrimaryBitstreamID method, of class Bundle. + */ + @Test + public void testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID() throws IOException, SQLException, AuthorizeException { + // Allow Item WRITE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); + // Allow Bundle ADD permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.ADD); + // Allow Bitstream WRITE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.WRITE)); + // Allow Bitstream DELETE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.DELETE)); + + + context.turnOffAuthorisationSystem(); + //set a value different than default + File f = new File(testProps.get("test.bitstream").toString()); + Bitstream bs = bitstreamService.create(context, new FileInputStream(f)); + bundleService.addBitstream(context, b, bs); + b.setPrimaryBitstreamID(bs); + context.restoreAuthSystemState(); + + assertThat("testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID 0", b.getPrimaryBitstream(), equalTo(bs)); + //remove bitstream + bundleService.removeBitstream(context, b, bs); + //is -1 when not set + assertThat("testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID 1", b.getPrimaryBitstream(), equalTo(null)); + } + /** * Test of update method, of class Bundle. 
*/ From c96558986a00eb941ac72404e02121db33375064 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Thu, 19 Oct 2023 08:58:08 +0100 Subject: [PATCH 465/686] unset primary bitstream on bitstream service (cherry picked from commit 47ca74bc4220249b95de9b8e71186277c9ac31ca) --- .../main/java/org/dspace/content/BitstreamServiceImpl.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 16532660561d..9bc863860967 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -276,6 +276,10 @@ public void delete(Context context, Bitstream bitstream) throws SQLException, Au //Remove our bitstream from all our bundles final List bundles = bitstream.getBundles(); for (Bundle bundle : bundles) { + //We also need to remove the bitstream id when it's set as bundle's primary bitstream + if(bitstream.equals(bundle.getPrimaryBitstream())) { + bundle.unsetPrimaryBitstreamID(); + } bundle.removeBitstream(bitstream); } From 7ec5f7ec8acdb8794e5795e428394db609fa463b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Thu, 19 Oct 2023 09:38:01 +0100 Subject: [PATCH 466/686] adding sql expression to fix deleted primary bitstreams from bundle (cherry picked from commit 8a531ad0c7e8fdf09fa9a3870024687e6708a9a1) --- ....10.12__Fix-deleted-primary-bitstreams.sql | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql new file mode 100644 index 000000000000..b1739dbd9600 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql @@ -0,0 +1,26 @@ +BEGIN; + +-- Remove all primary bitstreams that are marked as deleted +UPDATE bundle +SET primary_bitstream_id = NULL +WHERE primary_bitstream_id IN + ( SELECT bs.uuid + FROM bitstream AS bs + INNER JOIN bundle as bl ON bs.uuid = bl.primary_bitstream_id + WHERE bs.deleted IS TRUE ); + +-- Remove all primary bitstreams that don't make part on bundle's bitstreams +UPDATE bundle +SET primary_bitstream_id = NULL +WHERE primary_bitstream_id IN + ( SELECT bl.primary_bitstream_id + FROM bundle as bl + WHERE bl.primary_bitstream_id IS NOT NULL + AND bl.primary_bitstream_id NOT IN + ( SELECT bitstream_id + FROM bundle2bitstream AS b2b + WHERE b2b.bundle_id = bl.uuid + ) + ); + +COMMIT; \ No newline at end of file From 7ab1d396711408be0140ba2058ab68e84f450972 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Thu, 19 Oct 2023 09:58:24 +0100 Subject: [PATCH 467/686] add bundle remove authorization (cherry picked from commit 3255e073fa110a3354f1265853bbf531c677f6ea) --- .../src/main/java/org/dspace/content/BitstreamServiceImpl.java | 1 + 1 file changed, 1 insertion(+) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 9bc863860967..6abe9947e9b4 100644 --- 
a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -276,6 +276,7 @@ public void delete(Context context, Bitstream bitstream) throws SQLException, Au //Remove our bitstream from all our bundles final List bundles = bitstream.getBundles(); for (Bundle bundle : bundles) { + authorizeService.authorizeAction(context, bundle, Constants.REMOVE); //We also need to remove the bitstream id when it's set as bundle's primary bitstream if(bitstream.equals(bundle.getPrimaryBitstream())) { bundle.unsetPrimaryBitstreamID(); From 6b35f9e449c84b73d7e7c34dc362940c1000ff70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Thu, 19 Oct 2023 10:16:38 +0100 Subject: [PATCH 468/686] adding missing bundle REMOVE authorization (cherry picked from commit 4a05600194fb9be7e19084f3a9106a0152fd0d80) --- dspace-api/src/test/java/org/dspace/content/BundleTest.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/content/BundleTest.java b/dspace-api/src/test/java/org/dspace/content/BundleTest.java index 13b943b4d6b4..851d8267ea84 100644 --- a/dspace-api/src/test/java/org/dspace/content/BundleTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BundleTest.java @@ -522,6 +522,8 @@ public void testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID() throws IOExcepti doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); // Allow Bundle ADD permissions doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.ADD); + // Allow Bundle REMOVE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.REMOVE); // Allow Bitstream WRITE permissions doNothing().when(authorizeServiceSpy) .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.WRITE)); From 55eba18a478dcf71b4361f54662afaa9e2e72d9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Thu, 19 Oct 2023 11:16:26 +0100 Subject: [PATCH 469/686] add missing head style check (cherry picked from commit caba4bbb96f56c103c4dd8ac9f9fa5863b40e04c) --- .../V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql index b1739dbd9600..c97d2246578e 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql @@ -1,3 +1,11 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + BEGIN; -- Remove all primary bitstreams that are marked as deleted From 09d5f6fcc95bf95f583c89de2c907fe28efcee1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Thu, 19 Oct 2023 11:42:58 +0100 Subject: [PATCH 470/686] fix style errors (cherry picked from commit 74605f159af5e53a3e890f578732a858cef12e51) --- .../src/main/java/org/dspace/content/BitstreamServiceImpl.java | 2 +- dspace-api/src/test/java/org/dspace/content/BundleTest.java | 3 ++- 2 
files changed, 3 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 6abe9947e9b4..691d38f03039 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -278,7 +278,7 @@ public void delete(Context context, Bitstream bitstream) throws SQLException, Au for (Bundle bundle : bundles) { authorizeService.authorizeAction(context, bundle, Constants.REMOVE); //We also need to remove the bitstream id when it's set as bundle's primary bitstream - if(bitstream.equals(bundle.getPrimaryBitstream())) { + if (bitstream.equals(bundle.getPrimaryBitstream())) { bundle.unsetPrimaryBitstreamID(); } bundle.removeBitstream(bitstream); diff --git a/dspace-api/src/test/java/org/dspace/content/BundleTest.java b/dspace-api/src/test/java/org/dspace/content/BundleTest.java index 851d8267ea84..4af64b81cb0c 100644 --- a/dspace-api/src/test/java/org/dspace/content/BundleTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BundleTest.java @@ -517,7 +517,8 @@ public void testRemoveBitstreamAuth() throws SQLException, AuthorizeException, I * Test removeBitstream method and also the unsetPrimaryBitstreamID method, of class Bundle. */ @Test - public void testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID() throws IOException, SQLException, AuthorizeException { + public void testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID() + throws IOException, SQLException, AuthorizeException { // Allow Item WRITE permissions doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); // Allow Bundle ADD permissions From 024c47559e55cfcd1b0f153309c6280705e3a93f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Mon, 30 Oct 2023 11:27:18 +0000 Subject: [PATCH 471/686] new testDeleteBitstreamAndUnsetPrimaryBitstreamID test for primary bitstream verification (cherry picked from commit e6d108a94e41e58d6d701f3ef0429fda438e6555) --- .../org/dspace/content/BitstreamTest.java | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java index 921e4efcc7d8..30ef5f37fb58 100644 --- a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java @@ -432,6 +432,55 @@ public void testDeleteAndExpunge() throws IOException, SQLException, AuthorizeEx assertThat("testExpunge 0", bitstreamService.find(context, bitstreamId), nullValue()); } + /** + * Test of delete method, of class Bitstream. 
+ */ + @Test + public void testDeleteBitstreamAndUnsetPrimaryBitstreamID() + throws IOException, SQLException, AuthorizeException { + + context.turnOffAuthorisationSystem(); + + Community owningCommunity = communityService.create(null, context); + Collection collection = collectionService.create(context, owningCommunity); + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false); + Item item = installItemService.installItem(context, workspaceItem); + Bundle b = bundleService.create(context, item, "TESTBUNDLE"); + + // Allow Item WRITE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); + // Allow Bundle ADD permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.ADD); + // Allow Bundle REMOVE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.REMOVE); + // Allow Bitstream WRITE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.WRITE)); + // Allow Bitstream DELETE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.DELETE)); + + //set a value different than default + File f = new File(testProps.get("test.bitstream").toString()); + + // Create a new bitstream, which we can delete. + Bitstream bs = bitstreamService.create(context, new FileInputStream(f)); + bundleService.addBitstream(context, b, bs); + // set primary bitstream + b.setPrimaryBitstreamID(bs); + context.restoreAuthSystemState(); + + // Test that delete will flag the bitstream as deleted + assertFalse("testDeleteBitstreamAndUnsetPrimaryBitstreamID 0", bs.isDeleted()); + assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 1", b.getPrimaryBitstream(), equalTo(bs)); + // Delete bitstream + bitstreamService.delete(context, bs); + assertTrue("testDeleteBitstreamAndUnsetPrimaryBitstreamID 2", bs.isDeleted()); + + // Now test if the primary bitstream was unset from bundle + assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 3", b.getPrimaryBitstream(), equalTo(null)); + } + /** * Test of retrieve method, of class Bitstream. */ From 41640bbb653bb75741dd2131a168c1cdb40730cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Mon, 30 Oct 2023 11:45:12 +0000 Subject: [PATCH 472/686] new testDeleteBitstreamAndUnsetPrimaryBitstreamID test for primary bitstream verification (cherry picked from commit ad0d22a13a35a2167557efeb5ddea7a3a504424d) --- .../java/org/dspace/content/BitstreamTest.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java index 30ef5f37fb58..eb3de96d2fd1 100644 --- a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java @@ -464,18 +464,18 @@ public void testDeleteBitstreamAndUnsetPrimaryBitstreamID() File f = new File(testProps.get("test.bitstream").toString()); // Create a new bitstream, which we can delete. 
- Bitstream bs = bitstreamService.create(context, new FileInputStream(f)); - bundleService.addBitstream(context, b, bs); + Bitstream delBS = bitstreamService.create(context, new FileInputStream(f)); + bundleService.addBitstream(context, b, delBS); // set primary bitstream - b.setPrimaryBitstreamID(bs); + b.setPrimaryBitstreamID(delBS); context.restoreAuthSystemState(); // Test that delete will flag the bitstream as deleted - assertFalse("testDeleteBitstreamAndUnsetPrimaryBitstreamID 0", bs.isDeleted()); - assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 1", b.getPrimaryBitstream(), equalTo(bs)); + assertFalse("testDeleteBitstreamAndUnsetPrimaryBitstreamID 0", delBS.isDeleted()); + assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 1", b.getPrimaryBitstream(), equalTo(delBS)); // Delete bitstream - bitstreamService.delete(context, bs); - assertTrue("testDeleteBitstreamAndUnsetPrimaryBitstreamID 2", bs.isDeleted()); + bitstreamService.delete(context, delBS); + assertTrue("testDeleteBitstreamAndUnsetPrimaryBitstreamID 2", delBS.isDeleted()); // Now test if the primary bitstream was unset from bundle assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 3", b.getPrimaryBitstream(), equalTo(null)); From 68e8894169f97840de1748568ba97dc30a1bee60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Mon, 30 Oct 2023 13:08:53 +0000 Subject: [PATCH 473/686] new testDeleteBitstreamAndUnsetPrimaryBitstreamID remove unnecessary stubs (cherry picked from commit a3e506c7f452133e3cc973705d671dba61a469d6) --- .../src/test/java/org/dspace/content/BitstreamTest.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java index eb3de96d2fd1..e85a0fc7b78d 100644 --- a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java @@ -447,10 +447,6 @@ public void testDeleteBitstreamAndUnsetPrimaryBitstreamID() Item item = installItemService.installItem(context, workspaceItem); Bundle b = bundleService.create(context, item, "TESTBUNDLE"); - // Allow Item WRITE permissions - doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); - // Allow Bundle ADD permissions - doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.ADD); // Allow Bundle REMOVE permissions doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.REMOVE); // Allow Bitstream WRITE permissions From 412d62aa4300fc8d0bde834c9010cbd564c3a3ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Mon, 30 Oct 2023 22:48:49 +0000 Subject: [PATCH 474/686] make comments more clear to understand (cherry picked from commit c0bbd9d91f894fbe26f8cf7c4f166da8ba1cefd3) --- .../V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql index c97d2246578e..7a0bae1582d4 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql +++ 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql @@ -8,7 +8,7 @@ BEGIN; --- Remove all primary bitstreams that are marked as deleted +-- Unset any primary bitstream that is marked as deleted UPDATE bundle SET primary_bitstream_id = NULL WHERE primary_bitstream_id IN @@ -17,7 +17,7 @@ WHERE primary_bitstream_id IN INNER JOIN bundle as bl ON bs.uuid = bl.primary_bitstream_id WHERE bs.deleted IS TRUE ); --- Remove all primary bitstreams that don't make part on bundle's bitstreams +-- Unset any primary bitstream that don't belong to bundle's bitstreams list UPDATE bundle SET primary_bitstream_id = NULL WHERE primary_bitstream_id IN @@ -31,4 +31,4 @@ WHERE primary_bitstream_id IN ) ); -COMMIT; \ No newline at end of file +COMMIT; From c601ac3475602b99957d89d2875dea0f7a7303a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Mon, 30 Oct 2023 22:49:31 +0000 Subject: [PATCH 475/686] typo (cherry picked from commit 74cce86afcc163c52502892556679e6175fa1948) --- .../V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql index 7a0bae1582d4..9dd2f54a43eb 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql @@ -17,7 +17,7 @@ WHERE primary_bitstream_id IN INNER JOIN bundle as bl ON bs.uuid = bl.primary_bitstream_id WHERE bs.deleted IS TRUE ); --- Unset any primary bitstream that don't belong to bundle's bitstreams list +-- Unset any primary bitstream that don't belong to bundle's bitstream list UPDATE bundle SET primary_bitstream_id = NULL WHERE primary_bitstream_id IN From 8be45918445a3189af749304fe71499899d62227 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Tue, 31 Oct 2023 16:20:33 +0100 Subject: [PATCH 476/686] [DSC-1319] Refactoring of ROR integration --- .../authority/RorOrgUnitAuthority.java | 50 +++- .../provider/impl/RorOrgUnitDataProvider.java | 69 ----- .../external/ror/service/RorFieldMapping.java | 38 +++ .../RorImportMetadataSourceServiceImpl.java | 263 ++++++++++++++++++ .../java/org/dspace/ror/ROROrgUnitDTO.java | 87 ------ .../org/dspace/ror/ROROrgUnitListDTO.java | 36 --- .../org/dspace/ror/client/RORApiClient.java | 21 -- .../dspace/ror/client/RORApiClientImpl.java | 163 ----------- .../org/dspace/ror/service/RORApiService.java | 28 -- .../dspace/ror/service/RORApiServiceImpl.java | 123 -------- .../spring-dspace-addon-import-services.xml | 6 + dspace/config/modules/ror.cfg | 6 +- dspace/config/spring/api/core-services.xml | 3 - .../config/spring/api/external-services.xml | 6 +- dspace/config/spring/api/ror-integration.xml | 35 +++ 15 files changed, 382 insertions(+), 552 deletions(-) delete mode 100644 dspace-api/src/main/java/org/dspace/external/provider/impl/RorOrgUnitDataProvider.java create mode 100644 dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorFieldMapping.java create mode 100644 dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java delete mode 100644 
dspace-api/src/main/java/org/dspace/ror/ROROrgUnitDTO.java delete mode 100644 dspace-api/src/main/java/org/dspace/ror/ROROrgUnitListDTO.java delete mode 100644 dspace-api/src/main/java/org/dspace/ror/client/RORApiClient.java delete mode 100644 dspace-api/src/main/java/org/dspace/ror/client/RORApiClientImpl.java delete mode 100644 dspace-api/src/main/java/org/dspace/ror/service/RORApiService.java delete mode 100644 dspace-api/src/main/java/org/dspace/ror/service/RORApiServiceImpl.java create mode 100644 dspace/config/spring/api/ror-integration.xml diff --git a/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java index 09f7330b62fe..c733b95fc089 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java @@ -8,21 +8,25 @@ package org.dspace.content.authority; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.dspace.content.authority.factory.ItemAuthorityServiceFactory; -import org.dspace.ror.ROROrgUnitDTO; -import org.dspace.ror.service.RORApiService; -import org.dspace.ror.service.RORApiServiceImpl; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.ror.service.RorImportMetadataSourceServiceImpl; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; public class RorOrgUnitAuthority extends ItemAuthority { - private final RORApiService rorApiService = dspace.getSingletonService(RORApiServiceImpl.class); + private final RorImportMetadataSourceServiceImpl rorImportMetadataSource = new DSpace().getServiceManager() + .getServicesByType(RorImportMetadataSourceServiceImpl.class).get(0); + private final ItemAuthorityServiceFactory itemAuthorityServiceFactory = dspace.getServiceManager().getServiceByName("itemAuthorityServiceFactory", ItemAuthorityServiceFactory.class); private final ConfigurationService configurationService = @@ -32,18 +36,20 @@ public class RorOrgUnitAuthority extends ItemAuthority { @Override public Choices getMatches(String text, int start, int limit, String locale) { + super.setPluginInstanceName(authorityName); Choices solrChoices = super.getMatches(text, start, limit, locale); - return solrChoices.values.length == 0 ? getRORApiMatches(text, start, limit) : solrChoices; + try { + return solrChoices.values.length == 0 ? 
getRORApiMatches(text, start, limit) : solrChoices; + } catch (MetadataSourceException e) { + throw new RuntimeException(e); + } } - private Choices getRORApiMatches(String text, int start, int limit) { - Choice[] rorApiChoices = getChoiceFromRORQueryResults( - rorApiService.getOrgUnits(text).stream() - .filter(ou -> "active".equals(ou.getStatus())) - .collect(Collectors.toList()) - ).toArray(new Choice[0]); + private Choices getRORApiMatches(String text, int start, int limit) throws MetadataSourceException { + Choice[] rorApiChoices = getChoiceFromRORQueryResults(rorImportMetadataSource.getRecords(text, 0, 0)) + .toArray(new Choice[0]); int confidenceValue = itemAuthorityServiceFactory.getInstance(authorityName) .getConfidenceForChoices(rorApiChoices); @@ -52,15 +58,29 @@ private Choices getRORApiMatches(String text, int start, int limit) { rorApiChoices.length > (start + limit), 0); } - private List getChoiceFromRORQueryResults(List orgUnits) { + private List getChoiceFromRORQueryResults(Collection orgUnits) { return orgUnits .stream() - .map(orgUnit -> new Choice(composeAuthorityValue(orgUnit.getIdentifier()), orgUnit.getName(), - orgUnit.getName(), buildExtras(orgUnit))) + .map(orgUnit -> new Choice(composeAuthorityValue(getIdentifier(orgUnit)), getName(orgUnit), + getName(orgUnit), buildExtras(orgUnit))) .collect(Collectors.toList()); } - private Map buildExtras(ROROrgUnitDTO orgUnit) { + private String getIdentifier(ImportRecord orgUnit) { + return orgUnit.getValue("organization", "identifier", "ror").stream() + .findFirst() + .map(metadata -> metadata.getValue()) + .orElse(null); + } + + private String getName(ImportRecord orgUnit) { + return orgUnit.getValue("dc", "title", null).stream() + .findFirst() + .map(metadata -> metadata.getValue()) + .orElse(null); + } + + private Map buildExtras(ImportRecord orgUnit) { return new HashMap<>(); } diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/RorOrgUnitDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/RorOrgUnitDataProvider.java deleted file mode 100644 index 76a38796d5f6..000000000000 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/RorOrgUnitDataProvider.java +++ /dev/null @@ -1,69 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.external.provider.impl; - -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.dspace.external.model.ExternalDataObject; -import org.dspace.external.provider.AbstractExternalDataProvider; -import org.dspace.ror.ROROrgUnitDTO; -import org.dspace.ror.service.RORApiService; -import org.springframework.beans.factory.annotation.Autowired; - -public class RorOrgUnitDataProvider extends AbstractExternalDataProvider { - - @Autowired - private RORApiService rorApiService; - - private String sourceIdentifier; - - @Override - public Optional getExternalDataObject(String id) { - return rorApiService.getOrgUnit(id) - .map(this::convertToExternalDataObject); - } - - @Override - public List searchExternalDataObjects(String query, int start, int limit) { - return rorApiService.getOrgUnits(query).stream() - .map(this::convertToExternalDataObject) - .collect(Collectors.toList()); - } - - private ExternalDataObject convertToExternalDataObject(ROROrgUnitDTO orgUnit) { - 
ExternalDataObject object = new ExternalDataObject(sourceIdentifier); - object.setId(orgUnit.getIdentifier()); - object.setValue(orgUnit.getName()); - object.setDisplayValue(orgUnit.getName()); - object.setMetadata(rorApiService.getMetadataValues(orgUnit)); - return object; - } - - @Override - public boolean supports(String source) { - return StringUtils.equals(sourceIdentifier, source); - } - - @Override - public int getNumberOfResults(String query) { - return searchExternalDataObjects(query, 0, -1).size(); - } - - public void setSourceIdentifier(String sourceIdentifier) { - this.sourceIdentifier = sourceIdentifier; - } - - @Override - public String getSourceIdentifier() { - return sourceIdentifier; - } - -} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorFieldMapping.java new file mode 100644 index 000000000000..5248d793e292 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorFieldMapping.java @@ -0,0 +1,38 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ror.service; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Scopus metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class RorFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. 
+ */ + @Override + @Resource(name = "rorMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..cfd9300f0e48 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java @@ -0,0 +1,263 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ror.service; + +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +public class RorImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + + private int timeout = 1000; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "ror"; + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? 
null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for ROR"); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for ROR"); + } + + @Override + public void init() throws Exception { + } + + /** + * This class is a Callable implementation to get ADS entries based on query + * object. This Callable use as query value the string queryString passed to + * constructor. If the object will be construct through Query.class instance, a + * Query's map entry with key "query" will be used. Pagination is supported too, + * using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + return search(query.getParameterAsClass("query", String.class)); + } + } + + /** + * This class is a Callable implementation to get an ADS entry using bibcode The + * bibcode to use can be passed through the constructor as a String or as + * Query's map entry, with the key "id". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + return searchById(query.getParameterAsClass("id", String.class)); + } + } + + /** + * This class is a Callable implementation to count the number of entries for an + * ADS query. This Callable use as query value to ADS the string queryString + * passed to constructor. If the object will be construct through Query.class + * instance, the value of the Query's map with the key "query" will be used. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + private Query query; + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + return count(query.getParameterAsClass("query", String.class)); + } + } + + public Integer count(String query) { + try { + Map> params = new HashMap>(); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("query", query); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + if (StringUtils.isEmpty(resp)) { + return 0; + } + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + return jsonNode.at("/number_of_results").asInt(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } + return 0; + } + + private List searchById(String id) { + + List adsResults = new ArrayList<>(); + try { + Map> params = new HashMap>(); + + URIBuilder uriBuilder = new URIBuilder(this.url + "/" + id); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + if (StringUtils.isEmpty(resp)) { + return adsResults; + } + + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + adsResults.add(transformSourceRecords(jsonNode.toString())); + + } catch (URISyntaxException e) { + e.printStackTrace(); + } + return adsResults; + } + + private List search(String query) { + List adsResults = new ArrayList<>(); + try { + Map> params = new HashMap>(); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("query", query); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + if (StringUtils.isEmpty(resp)) { + return adsResults; + } + + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + JsonNode docs = jsonNode.at("/items"); + if (docs.isArray()) { + Iterator nodes = docs.elements(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + adsResults.add(transformSourceRecords(node.toString())); + } + } else { + adsResults.add(transformSourceRecords(docs.toString())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); + } + return adsResults; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + public void setUrl(String url) { + this.url = url; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/ror/ROROrgUnitDTO.java b/dspace-api/src/main/java/org/dspace/ror/ROROrgUnitDTO.java deleted file mode 100644 index 9eac4494fc59..000000000000 --- a/dspace-api/src/main/java/org/dspace/ror/ROROrgUnitDTO.java +++ /dev/null @@ -1,87 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.ror; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.commons.lang3.StringUtils; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class ROROrgUnitDTO { - - @JsonProperty("id") - private String url; - - private String name; - - private String[] acronyms; - - 
private String[] aliases; - - private String status; - - private String[] types; - - public String getIdentifier() { - if (StringUtils.isBlank(url)) { - return null; - } - - String[] splittedUrl = url.split("/"); - return splittedUrl[splittedUrl.length - 1]; - } - - public String getUrl() { - return url; - } - - public void setUrl(String url) { - this.url = url; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String[] getAcronyms() { - return acronyms; - } - - public void setAcronyms(String[] acronyms) { - this.acronyms = acronyms; - } - - public String[] getAliases() { - return aliases; - } - - public void setAliases(String[] aliases) { - this.aliases = aliases; - } - - public String getStatus() { - return status; - } - - public void setStatus(String status) { - this.status = status; - } - - public String[] getTypes() { - return types; - } - - public void setTypes(String[] types) { - this.types = types; - } -} diff --git a/dspace-api/src/main/java/org/dspace/ror/ROROrgUnitListDTO.java b/dspace-api/src/main/java/org/dspace/ror/ROROrgUnitListDTO.java deleted file mode 100644 index 1b3b5c7c9593..000000000000 --- a/dspace-api/src/main/java/org/dspace/ror/ROROrgUnitListDTO.java +++ /dev/null @@ -1,36 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.ror; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class ROROrgUnitListDTO { - private ROROrgUnitDTO[] items; - - @JsonProperty(value = "number_of_results") - private int total; - - public ROROrgUnitDTO[] getItems() { - return items; - } - - public void setItems(ROROrgUnitDTO[] items) { - this.items = items; - } - - public int getTotal() { - return total; - } - - public void setTotal(int total) { - this.total = total; - } -} diff --git a/dspace-api/src/main/java/org/dspace/ror/client/RORApiClient.java b/dspace-api/src/main/java/org/dspace/ror/client/RORApiClient.java deleted file mode 100644 index 5e2682c3a002..000000000000 --- a/dspace-api/src/main/java/org/dspace/ror/client/RORApiClient.java +++ /dev/null @@ -1,21 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.ror.client; - -import java.util.List; -import java.util.Optional; - -import org.dspace.ror.ROROrgUnitDTO; - -public interface RORApiClient { - - List searchOrganizations(String text); - - Optional findOrganizationByRORId(String rorId); -} diff --git a/dspace-api/src/main/java/org/dspace/ror/client/RORApiClientImpl.java b/dspace-api/src/main/java/org/dspace/ror/client/RORApiClientImpl.java deleted file mode 100644 index c6934de86d00..000000000000 --- a/dspace-api/src/main/java/org/dspace/ror/client/RORApiClientImpl.java +++ /dev/null @@ -1,163 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.ror.client; - -import static java.nio.charset.StandardCharsets.UTF_8; -import static 
org.apache.http.client.methods.RequestBuilder.get; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Optional; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.io.IOUtils; -import org.apache.http.HttpEntity; -import org.apache.http.HttpResponse; -import org.apache.http.HttpStatus; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpUriRequest; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.dspace.ror.ROROrgUnitDTO; -import org.dspace.ror.ROROrgUnitListDTO; -import org.dspace.services.ConfigurationService; -import org.springframework.beans.factory.annotation.Autowired; - -public class RORApiClientImpl implements RORApiClient { - - public static final int TIMEOUT_MS = 15 * 1000; - - @Autowired - private ConfigurationService configurationService; - - private final ObjectMapper objectMapper = new ObjectMapper(); - - private final RequestConfig requestConfig = RequestConfig.custom() - .setConnectTimeout(TIMEOUT_MS) - .setConnectionRequestTimeout(TIMEOUT_MS) - .setSocketTimeout(TIMEOUT_MS) - .build(); - - @Override - public List searchOrganizations(String text) { - RorResponse response = performGetRequest(buildGetWithQueryExact(getRORApiUrl(), text.trim())); - - if (isNotFound(response)) { - return Collections.emptyList(); - } - - if (isNotSuccessful(response)) { - String message = "ROR API request was not successful. " - + "Status: " + response.getStatusCode() + " - Content: " + response.getContent(); - throw new RuntimeException(message); - } - - ROROrgUnitListDTO orgUnits = parseResponse(response, ROROrgUnitListDTO.class); - - return List.of(orgUnits.getItems()); - } - - @Override - public Optional findOrganizationByRORId(String rorId) { - RorResponse response = performGetRequest(buildGetWithRORId(getRORApiUrl(), rorId)); - - if (isNotFound(response)) { - return Optional.empty(); - } - - if (isNotSuccessful(response)) { - String message = "ROR API request was not successful. 
" - + "Status: " + response.getStatusCode() + " - Content: " + response.getContent(); - throw new RuntimeException(message); - } - - ROROrgUnitDTO orgUnit = parseResponse(response, ROROrgUnitDTO.class); - - return Optional.ofNullable(orgUnit); - } - - private RorResponse performGetRequest(HttpUriRequest request) { - try (CloseableHttpClient httpClient = HttpClientBuilder.create().build()) { - CloseableHttpResponse httpResponse = httpClient.execute(request); - int statusCode = getStatusCode(httpResponse); - HttpEntity entity = httpResponse.getEntity(); - return new RorResponse(statusCode, getContent(httpResponse)); -// return httpResponse; - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - private HttpUriRequest buildGetWithRORId(String url, String rorId) { - return get(url + "/" + rorId).setConfig(requestConfig).build(); - } - - private HttpUriRequest buildGetWithQuery(String url, String value) { - return get(url).addParameter("query", value).setConfig(requestConfig).build(); - } - - private HttpUriRequest buildGetWithQueryExact(String url, String value) { - return get(url).addParameter("query", "\"" + value + "\"").setConfig(requestConfig).build(); - } - - private T parseResponse(RorResponse response, Class clazz) { - try { - return objectMapper.readValue(response.getContent(), clazz); - } catch (UnsupportedOperationException | IOException e) { - throw new RuntimeException(e); - } - } - - private String getContent(HttpResponse response) { - try { - HttpEntity entity = response.getEntity(); - return entity != null ? IOUtils.toString(entity.getContent(), UTF_8) : null; - } catch (UnsupportedOperationException | IOException e) { - throw new RuntimeException(e); - } - } - - private boolean isNotSuccessful(RorResponse response) { - int statusCode = response.getStatusCode(); - return statusCode < 200 || statusCode > 299; - } - - private boolean isNotFound(RorResponse response) { - return response.getStatusCode() == HttpStatus.SC_NOT_FOUND; - } - - private int getStatusCode(HttpResponse response) { - return response.getStatusLine().getStatusCode(); - } - - private String getRORApiUrl() { - return configurationService.getProperty("ror.orgunit-import.api-url"); - } - - private static class RorResponse { - private final int statusCode; - private final String content; - - public RorResponse(int statusCode, String content) { - - this.statusCode = statusCode; - this.content = content; - } - - public int getStatusCode() { - return statusCode; - } - - public String getContent() { - return content; - } - } -} diff --git a/dspace-api/src/main/java/org/dspace/ror/service/RORApiService.java b/dspace-api/src/main/java/org/dspace/ror/service/RORApiService.java deleted file mode 100644 index fffaa14ee777..000000000000 --- a/dspace-api/src/main/java/org/dspace/ror/service/RORApiService.java +++ /dev/null @@ -1,28 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.ror.service; -import java.util.List; -import java.util.Optional; - -import org.dspace.content.dto.MetadataValueDTO; -import org.dspace.ror.ROROrgUnitDTO; - -public interface RORApiService { - - public List getOrgUnits(String query); - - public Optional getOrgUnit(String rorId); - - public List getMetadataValues(ROROrgUnitDTO orgUnit); - - public List getMetadataValues(String rorId); - - public List getMetadataFields(); - -} diff 
--git a/dspace-api/src/main/java/org/dspace/ror/service/RORApiServiceImpl.java b/dspace-api/src/main/java/org/dspace/ror/service/RORApiServiceImpl.java deleted file mode 100644 index cff214fe3f11..000000000000 --- a/dspace-api/src/main/java/org/dspace/ror/service/RORApiServiceImpl.java +++ /dev/null @@ -1,123 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.ror.service; - -import static java.util.Optional.ofNullable; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.dspace.content.dto.MetadataValueDTO; -import org.dspace.ror.ROROrgUnitDTO; -import org.dspace.ror.client.RORApiClient; -import org.dspace.services.ConfigurationService; -import org.springframework.beans.factory.annotation.Autowired; - -public class RORApiServiceImpl implements RORApiService { - - private static final String ORGUNIT_MAPPING_PREFIX = "ror.orgunit-import.api.metadata-field."; - - @Autowired - private RORApiClient apiClient; - - @Autowired - private ConfigurationService configurationService; - - @Override - public List getOrgUnits(String query) { - return apiClient.searchOrganizations(query); - } - - @Override - public Optional getOrgUnit(String rorId) { - return apiClient.findOrganizationByRORId(rorId); - } - - @Override - public List getMetadataValues(String rorId) { - return getOrgUnit(rorId) - .map(this::getMetadataValues) - .orElse(getInactiveMetadataField()); - } - - @Override - public List getMetadataFields() { - return configurationService.getPropertyKeys(ORGUNIT_MAPPING_PREFIX).stream() - .map(key -> configurationService.getProperty(key)) - .filter(this::isMetadataField) - .collect(Collectors.toList()); - } - - @Override - public List getMetadataValues(ROROrgUnitDTO orgUnit) { - - List metadataValues = new ArrayList<>(); - - getPersonMetadataField("name") - .flatMap(field -> getMetadataValue(orgUnit.getName(), field)) - .ifPresent(metadataValues::add); - - getPersonMetadataField("acronym") - .flatMap(field -> getMetadataArrayValue(orgUnit.getAcronyms(), field)) - .ifPresent(metadataValues::add); - - getPersonMetadataField("url") - .flatMap(field -> getMetadataValue(orgUnit.getUrl(), field)) - .ifPresent(metadataValues::add); - - getPersonMetadataField("identifier") - .flatMap(field -> getMetadataValue(orgUnit.getIdentifier(), field)) - .ifPresent(metadataValues::add); - - return metadataValues; - - } - - private List getInactiveMetadataField() { - return getPersonMetadataField("active") - .flatMap(field -> getMetadataValue("false", field)) - .map(List::of) - .orElse(List.of()); - } - - private Optional getMetadataValue(String value, String field) { - return Optional.ofNullable(value) - .filter(StringUtils::isNotBlank) - .map(metadataValue -> new MetadataValueDTO(field, metadataValue)); - } - - private Optional getMetadataArrayValue(String[] values, String field) { - String joinedAcronym = Arrays.stream(values) - .filter(StringUtils::isNotBlank) - .collect(Collectors.joining("/")); - return StringUtils.isNotEmpty(joinedAcronym) - ? 
Optional.of(new MetadataValueDTO(field, joinedAcronym)) - : Optional.empty(); - } - - private boolean isMetadataField(String property) { - return property != null && property.contains("."); - } - - private Optional getPersonMetadataField(String fieldName) { - return ofNullable(configurationService.getProperty(ORGUNIT_MAPPING_PREFIX + fieldName)); - } - - public RORApiClient getApiClient() { - return apiClient; - } - - public void setApiClient(RORApiClient apiClient) { - this.apiClient = apiClient; - } -} diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml index e5b943c5c20f..a20d47c30e83 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml @@ -157,6 +157,12 @@ + + + + + + diff --git a/dspace/config/modules/ror.cfg b/dspace/config/modules/ror.cfg index f92b96c35d21..4a7a6cfa4fac 100644 --- a/dspace/config/modules/ror.cfg +++ b/dspace/config/modules/ror.cfg @@ -1,6 +1,2 @@ ror.orgunit-import.api-url = https://api.ror.org/organizations -ror.authority.prefix = will be referenced::ROR-ID:: -ror.orgunit-import.api.metadata-field.name = dc.title -ror.orgunit-import.api.metadata-field.acronym = oairecerif.acronym -ror.orgunit-import.api.metadata-field.url = oairecerif.identifier.url -ror.orgunit-import.api.metadata-field.identifier = organization.identifier.ror \ No newline at end of file +ror.authority.prefix = will be referenced::ROR-ID:: \ No newline at end of file diff --git a/dspace/config/spring/api/core-services.xml b/dspace/config/spring/api/core-services.xml index f675c7bfc480..fbd75389170a 100644 --- a/dspace/config/spring/api/core-services.xml +++ b/dspace/config/spring/api/core-services.xml @@ -199,8 +199,5 @@ - - - diff --git a/dspace/config/spring/api/external-services.xml b/dspace/config/spring/api/external-services.xml index 1d91f89a053e..527933512172 100644 --- a/dspace/config/spring/api/external-services.xml +++ b/dspace/config/spring/api/external-services.xml @@ -231,8 +231,10 @@ - - + + + + OrgUnit diff --git a/dspace/config/spring/api/ror-integration.xml b/dspace/config/spring/api/ror-integration.xml new file mode 100644 index 000000000000..cee02dfc6942 --- /dev/null +++ b/dspace/config/spring/api/ror-integration.xml @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + From 01673c40292595d304e2a36660e5c97ad17e0100 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Thu, 2 Nov 2023 17:49:16 +0100 Subject: [PATCH 477/686] [DSC-1319] Finalized ror integration mapping --- .../RorParentOrgUnitMetadataContributor.java | 109 ++++++++++++++++++ .../RorImportMetadataSourceServiceImpl.java | 3 + dspace/config/spring/api/ror-integration.xml | 76 ++++++++++++ 3 files changed, 188 insertions(+) create mode 100644 dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/RorParentOrgUnitMetadataContributor.java diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/RorParentOrgUnitMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/RorParentOrgUnitMetadataContributor.java new file mode 100644 index 000000000000..be1910d7a521 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/RorParentOrgUnitMetadataContributor.java @@ -0,0 +1,109 @@ +/** + * The contents of this file 
are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +public class RorParentOrgUnitMetadataContributor extends SimpleJsonPathMetadataContributor { + + private String typeField; + + private String parentType; + + private String labelField; + + /** + * Retrieve the metadata associated with the given object. + * The toString() of the resulting object will be used. + * + * @param fullJson A class to retrieve metadata from. + * @return a collection of import records. Only the identifier of the found records may be put in the record. + */ + @Override + public Collection contributeMetadata(String fullJson) { + + Collection metadata = new ArrayList<>(); + Collection metadataValue = new ArrayList<>(); + + JsonNode jsonNode = convertStringJsonToJsonNode(fullJson); + JsonNode array = jsonNode.at(getQuery()); + if (!array.isArray()) { + return metadata; + } + + Iterator nodes = array.iterator(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + + if (!node.has(labelField)) { + continue; + } + + String type = node.has(typeField) ? node.get(typeField).asText() : null; + String label = node.get(labelField).asText(); + + if (parentType.equalsIgnoreCase(type)) { + metadataValue.add(label); + } + + } + + for (String value : metadataValue) { + MetadatumDTO metadatumDto = new MetadatumDTO(); + metadatumDto.setValue(value); + metadatumDto.setElement(getField().getElement()); + metadatumDto.setQualifier(getField().getQualifier()); + metadatumDto.setSchema(getField().getSchema()); + metadata.add(metadatumDto); + } + return metadata; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + return body; + } + + public String getTypeField() { + return typeField; + } + + public void setTypeField(String typeField) { + this.typeField = typeField; + } + + public String getLabelField() { + return labelField; + } + + public void setLabelField(String labelField) { + this.labelField = labelField; + } + + public String getParentType() { + return parentType; + } + + public void setParentType(String parentType) { + this.parentType = parentType; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java index cfd9300f0e48..ebc7caefb26d 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java @@ -198,6 +198,9 @@ public Integer count(String query) { private List searchById(String id) { List adsResults = new ArrayList<>(); + + id = StringUtils.removeStart(id, "https://ror.org/"); + try { Map> params = new HashMap>(); diff --git 
a/dspace/config/spring/api/ror-integration.xml b/dspace/config/spring/api/ror-integration.xml index cee02dfc6942..8d1dce46162e 100644 --- a/dspace/config/spring/api/ror-integration.xml +++ b/dspace/config/spring/api/ror-integration.xml @@ -14,6 +14,14 @@ + + + + + + + + @@ -32,4 +40,72 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 980fa990ec9fb96cd9d058fff0ff9654470eef22 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 2 Nov 2023 17:04:31 +0100 Subject: [PATCH 478/686] Merge tag 'dspace-7.6' into dspace-cris-7 [maven-release-plugin] copy for tag dspace-7.6 --- .codecov.yml | 7 - .dockerignore | 1 - .github/workflows/build.yml | 37 +- .github/workflows/docker.yml | 83 + .github/workflows/issue_opened.yml | 2 +- .github/workflows/label_merge_conflicts.yml | 2 +- Dockerfile | 2 +- Dockerfile.cli | 6 +- LICENSES_THIRD_PARTY | 381 ++- docker-compose.yml | 38 +- dspace-api/pom.xml | 9 +- .../ProcessCleanerConfiguration.java | 17 - .../bulkaccesscontrol/BulkAccessControl.java | 689 ++++ .../BulkAccessControlCli.java | 66 + ...lkAccessControlCliScriptConfiguration.java | 42 + .../BulkAccessControlScriptConfiguration.java | 110 + .../exception/BulkAccessControlException.java | 48 + .../model/AccessCondition.java | 59 + .../model/AccessConditionBitstream.java | 69 + .../model/AccessConditionItem.java | 45 + .../BulkAccessConditionConfiguration.java | 50 + .../model/BulkAccessControlInput.java | 72 + ...lkAccessConditionConfigurationService.java | 45 + .../MetadataDeletionScriptConfiguration.java | 17 - .../MetadataExportScriptConfiguration.java | 17 - .../MetadataImportScriptConfiguration.java | 16 - .../harvest/HarvestScriptConfiguration.java | 14 - .../ItemExportScriptConfiguration.java | 17 - .../org/dspace/app/itemimport/ItemImport.java | 82 +- .../dspace/app/itemimport/ItemImportCLI.java | 54 +- .../ItemImportCLIScriptConfiguration.java | 3 + .../ItemImportScriptConfiguration.java | 21 +- .../app/itemimport/ItemImportServiceImpl.java | 7 +- .../ImageMagickPdfThumbnailFilter.java | 4 +- .../ImageMagickThumbnailFilter.java | 22 +- .../ImageMagickVideoThumbnailFilter.java | 76 + .../MediaFilterScriptConfiguration.java | 19 - .../mediafilter/MediaFilterServiceImpl.java | 93 +- .../service/MediaFilterService.java | 18 + .../requestitem/RequestItemEmailNotifier.java | 88 +- .../dspace/app/requestitem/package-info.java | 15 +- ...rDatabaseResyncCliScriptConfiguration.java | 6 - .../java/org/dspace/app/util/DCInputSet.java | 4 + .../app/util/SubmissionConfigReader.java | 47 +- .../org/dspace/app/util/SyndicationFeed.java | 18 +- .../dspace/authenticate/IPAuthentication.java | 7 +- .../authenticate/LDAPAuthentication.java | 90 +- .../authorize/AuthorizeServiceImpl.java | 68 +- .../org/dspace/authorize/ResourcePolicy.java | 9 +- .../authorize/dao/ResourcePolicyDAO.java | 6 +- .../dao/impl/ResourcePolicyDAOImpl.java | 26 +- .../org/dspace/authorize/package-info.java | 67 + .../java/org/dspace/authorize/package.html | 68 - .../authorize/service/AuthorizeService.java | 29 +- .../service/ResourcePolicyService.java | 28 +- .../java/org/dspace/browse/BrowseIndex.java | 12 +- .../java/org/dspace/browse/ItemCounter.java | 45 +- .../java/org/dspace/browse/SolrBrowseDAO.java | 2 +- .../org/dspace/checker/CheckerCommand.java | 12 +- .../org/dspace/content/BundleServiceImpl.java | 95 +- .../java/org/dspace/content/Collection.java | 14 + .../dspace/content/CollectionServiceImpl.java | 33 + 
.../java/org/dspace/content/Community.java | 13 + .../dspace/content/CommunityServiceImpl.java | 15 +- .../org/dspace/content/ItemServiceImpl.java | 76 +- .../dspace/content/MetadataSchemaEnum.java | 3 +- .../org/dspace/content/MetadataValue.java | 2 +- .../RelationshipMetadataServiceImpl.java | 3 +- .../authority/ChoiceAuthorityServiceImpl.java | 56 +- .../DSpaceControlledVocabularyIndex.java | 47 + .../content/authority/SolrAuthority.java | 6 +- .../service/ChoiceAuthorityService.java | 4 + .../content/dao/impl/ProcessDAOImpl.java | 1 - .../content/service/CollectionService.java | 24 + .../content/service/CommunityService.java | 10 + .../dspace/content/service/ItemService.java | 20 + .../src/main/java/org/dspace/core/Utils.java | 12 +- .../ctask/general/MetadataWebService.java | 3 +- .../curate/CurationScriptConfiguration.java | 43 +- .../org/dspace/discovery/IndexClient.java | 62 +- .../IndexDiscoveryScriptConfiguration.java | 17 - .../org/dspace/discovery/IndexingUtils.java | 4 + .../org/dspace/discovery/SearchUtils.java | 100 +- .../org/dspace/discovery/SolrServiceImpl.java | 7 +- .../SolrServiceValuePairsIndexPlugin.java | 3 +- .../DiscoveryConfigurationService.java | 39 +- .../DiscoverySortConfiguration.java | 11 + .../CollectionIndexFactoryImpl.java | 4 +- .../CommunityIndexFactoryImpl.java | 4 +- .../InprogressSubmissionIndexFactoryImpl.java | 8 +- .../indexobject/ItemIndexFactoryImpl.java | 5 +- .../MetadataFieldIndexFactoryImpl.java | 1 + .../discovery/utils/DiscoverQueryBuilder.java | 2 +- .../org/dspace/eperson/GroupServiceImpl.java | 2 - .../main/java/org/dspace/event/Consumer.java | 31 +- .../java/org/dspace/event/package-info.java | 20 + .../provider/orcid/xml/Converter.java | 14 +- .../VersionedDOIIdentifierProvider.java | 47 +- .../VersionedHandleIdentifierProvider.java | 16 +- ...dentifierProviderWithCanonicalHandles.java | 31 +- .../dspace/identifier/doi/DOIConsumer.java | 1 - .../CiniiImportMetadataSourceServiceImpl.java | 6 + .../CrossRefDateMetadataProcessor.java | 79 + .../EpoImportMetadataSourceServiceImpl.java | 6 + .../service/LiveImportClientImpl.java | 16 +- .../PubmedDateMetadatumContributor.java | 16 +- ...PubmedImportMetadataSourceServiceImpl.java | 31 +- ...PubmedEuropeMetadataSourceServiceImpl.java | 4 + ...ScopusImportMetadataSourceServiceImpl.java | 4 + .../AbstractRemoteMetadataSource.java | 5 +- .../WOSImportMetadataSourceServiceImpl.java | 4 + .../java/org/dspace/orcid/OrcidHistory.java | 6 +- .../java/org/dspace/orcid/OrcidQueue.java | 4 +- .../OrcidBulkPushScriptConfiguration.java | 17 - .../main/java/org/dspace/scripts/Process.java | 4 + .../org/dspace/scripts/ScriptServiceImpl.java | 2 +- .../configuration/ScriptConfiguration.java | 28 +- .../statistics/SolrLoggerServiceImpl.java | 69 +- .../SolrLoggerUsageEventListener.java | 5 +- ...iledOpenUrlTrackerScriptConfiguration.java | 17 - .../statistics/service/SolrLoggerService.java | 6 + .../storage/bitstore/BaseBitStoreService.java | 32 +- .../storage/bitstore/BitStoreService.java | 5 +- .../bitstore/BitstreamStorageServiceImpl.java | 36 +- .../storage/bitstore/DSBitStoreService.java | 13 +- .../storage/bitstore/S3BitStoreService.java | 102 +- .../service/BitstreamStorageService.java | 2 +- .../dspace/storage/rdbms/DatabaseUtils.java | 45 +- .../rdbms/migration/MigrationUtils.java | 16 - ...Drop_constraint_for_DSpace_1_4_schema.java | 5 +- ...Drop_constraint_for_DSpace_1_6_schema.java | 5 +- ...adata_For_All_Objects_drop_constraint.java | 5 +- ...4_11_04__Enable_XMLWorkflow_Migration.java | 2 - 
..._DS_2701_Enable_XMLWorkflow_Migration.java | 2 - ...nFormsMigrationCliScriptConfiguration.java | 17 - ...sionFormsMigrationScriptConfiguration.java | 36 +- .../submit/model/AccessConditionOption.java | 61 +- ...riptionEmailNotificationConfiguration.java | 16 - .../java/org/dspace/usage/UsageEvent.java | 15 + .../java/org/dspace/util/DateMathParser.java | 65 +- .../org/dspace/util/FrontendUrlService.java | 87 + .../util/MultiFormatDateDeserializer.java | 41 + .../util/SolrUpgradePre6xStatistics.java | 4 +- .../java/org/dspace/util/TimeHelpers.java | 42 + .../dspace/validation/MetadataValidator.java | 5 +- .../src/main/resources/Messages.properties | 3 + .../org/dspace/license/CreativeCommons.xsl | 4 +- .../org/dspace/license/LicenseCleanup.xsl | 4 +- .../oracle/upgradeToFlyway4x.sql | 29 - .../postgres/upgradeToFlyway4x.sql | 2 +- .../storage/rdbms/sqlmigration/h2/README.md | 28 +- .../h2/V7.0_2021.03.26__process_to_group.sql | 8 +- ....6_2023.03.17__Remove_unused_sequence.sql} | 4 +- ...date_PNG_in_bitstream_format_registry.sql} | 9 +- ..._and_history_descriptions_to_text_type.sql | 10 + ...04.19__process_parameters_to_text_type.sql | 9 + ...1__CollectionCommunity_Metadata_Handle.sql | 90 - .../rdbms/sqlmigration/oracle/README.md | 84 - ...tial_DSpace_1.2_Oracle_database_schema.sql | 550 --- .../V1.3__Upgrade_to_DSpace_1.3_schema.sql | 57 - ...V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql | 133 - .../V1.4__Upgrade_to_DSpace_1.4_schema.sql | 371 --- .../V1.5__Upgrade_to_DSpace_1.5_schema.sql | 142 - .../V1.6__Upgrade_to_DSpace_1.6_schema.sql | 93 - .../V1.7__Upgrade_to_DSpace_1.7_schema.sql | 20 - .../V1.8__Upgrade_to_DSpace_1.8_schema.sql | 23 - .../V3.0__Upgrade_to_DSpace_3.x_schema.sql | 52 - .../V4.0__Upgrade_to_DSpace_4.x_schema.sql | 88 - ....9_2015.10.26__DS-2818_registry_update.sql | 64 - ...08.08__DS-1945_Helpdesk_Request_a_Copy.sql | 20 - ...9.26__DS-1582_Metadata_For_All_Objects.sql | 333 -- .../oracle/V5.6_2016.08.23__DS-3097.sql | 24 - ..._metadatavalue_resource_type_id_column.sql | 23 - ...015.03.07__DS-2701_Hibernate_migration.sql | 469 --- ..._03_06_01__DS_3378_lost_oracle_indexes.sql | 18 - .../oracle/V6.0_2016.01.03__DS-3024.sql | 25 - ...02.25__DS-3004-slow-searching-as-admin.sql | 30 - ...04.01__DS-1955_Increase_embargo_reason.sql | 25 - ...016.04.04__DS-3086-OAI-Performance-fix.sql | 46 - ...125-fix-bundle-bitstream-delete-rights.sql | 33 - ...DS-3168-fix-requestitem_item_id_column.sql | 24 - .../oracle/V6.0_2016.07.21__DS-2775.sql | 30 - ...6.07.26__DS-3277_fix_handle_assignment.sql | 44 - .../oracle/V6.0_2016.08.23__DS-3097.sql | 24 - .../V6.0_2016.11.29__DS-3410-lost-indexes.sql | 17 - .../oracle/V6.0_2016.11.30__DS-3409.sql | 16 - ...2017.10.12__DS-3542-stateless-sessions.sql | 20 - .../V7.0_2018.04.16__dspace-entities.sql | 65 - .../V7.0_2018.06.07__DS-3851-permission.sql | 24 - ....05.02__DS-4239-workflow-xml-migration.sql | 17 - ..._2019.07.31__Retrieval_of_name_variant.sql | 18 - .../V7.0_2019_06_14__scripts-and-process.sql | 40 - ..._2020.01.08__DS-626-statistics-tracker.sql | 29 - ....18__Move_entity_type_to_dspace_schema.sql | 56 - ..._type_from_item_template_to_collection.sql | 28 - ...ction_table_drop_workflow_stem_columns.sql | 15 - ...DV_place_after_migrating_from_DSpace_5.sql | 24 - .../V7.3_2022.05.16__Orcid_token_table.sql | 24 - .../V7.3_2022.06.16__process_to_group.sql | 1 + ...on_status_column_to_relationship_table.sql | 10 - ...mter_change_columns_subscription_table.sql | 45 - ...5_2022.12.09__Supervision_Orders_table.sql | 78 - 
...04.19__process_parameters_to_text_type.sql | 9 + .../sqlmigration/oracle/update-sequences.sql | 77 - .../rdbms/sqlmigration/postgres/README.md | 7 +- ....6_2023.03.17__Remove_unused_sequence.sql} | 8 +- ...date_PNG_in_bitstream_format_registry.sql} | 17 +- ..._and_history_descriptions_to_text_type.sql | 10 + ...04.19__process_parameters_to_text_type.sql | 9 + .../postgres/update-sequences.sql | 40 +- .../V5.7_2017.05.05__DS-3431.sql | 503 --- ...8.11__DS-2701_Basic_Workflow_Migration.sql | 37 - .../V6.1_2017.01.03__DS-3431.sql | 503 --- ....08.11__DS-2701_Xml_Workflow_Migration.sql | 141 - ....0_2018.04.03__upgrade_workflow_policy.sql | 27 - .../oracle/data_workflow_migration.sql | 377 --- .../v6.0__DS-2701_data_workflow_migration.sql | 377 --- .../v6.0__DS-2701_xml_workflow_migration.sql | 124 - .../oracle/xml_workflow_migration.sql | 124 - .../spring-dspace-addon-import-services.xml | 1 + ...ring-dspace-addon-requestitem-services.xml | 34 - .../spring/spring-dspace-core-services.xml | 15 - .../dspaceFolder/config/item-submission.xml | 22 +- .../test/data/dspaceFolder/config/local.cfg | 2 +- .../config/spring/api/access-conditions.xml | 30 + .../config/spring/api/identifier-service.xml | 13 +- .../config/spring/api/iiif-processing.xml | 5 +- .../config/spring/api/scripts.xml | 7 +- .../dspaceFolder/config/submission-forms.xml | 8 +- .../BulkAccessControlIT.java | 1860 +++++++++++ .../app/itemimport/ItemImportCLIIT.java | 25 + .../app/matcher/ResourcePolicyMatcher.java | 2 +- .../requestitem/JavaMailTestTransport.java | 65 + .../RequestItemEmailNotifierTest.java | 271 ++ .../builder/AbstractDSpaceObjectBuilder.java | 4 +- .../org/dspace/builder/CollectionBuilder.java | 25 + .../org/dspace/builder/CommunityBuilder.java | 24 +- .../java/org/dspace/builder/ItemBuilder.java | 4 +- .../org/dspace/builder/ProcessBuilder.java | 3 + .../dspace/builder/WorkspaceItemBuilder.java | 1 + .../org/dspace/content/CollectionTest.java | 3 - .../java/org/dspace/content/ItemTest.java | 74 +- .../test/java/org/dspace/core/UtilsTest.java | 6 + .../org/dspace/discovery/DiscoveryIT.java | 7 + ...VersionedHandleIdentifierProviderTest.java | 115 + .../canvasdimension/CanvasDimensionsIT.java | 34 + .../dspace/orcid/script/OrcidBulkPushIT.java | 56 + ...MockDSpaceRunnableScriptConfiguration.java | 17 - .../storage/bitstore/S3BitStoreServiceIT.java | 54 +- .../java/org/dspace/util/TimeHelpersTest.java | 34 + .../org/dspace/app/itemimport/test.pdf | Bin 0 -> 56812 bytes .../org/dspace/iiif/canvasdimension/cat.jp2 | Bin 0 -> 72794 bytes .../iiif/service/WordHighlightSolrSearch.java | 30 +- dspace-oai/pom.xml | 18 - .../AccessStatusElementItemCompilePlugin.java | 68 + dspace-server-webapp/pom.xml | 4 +- .../java/org/dspace/app/rest/Application.java | 17 +- .../rest/BitstreamCategoryRestController.java | 61 + .../app/rest/BitstreamRestController.java | 30 +- ...mOwningCollectionUpdateRestController.java | 27 +- .../app/rest/ItemTemplateRestController.java | 4 +- .../dspace/app/rest/OpenSearchController.java | 2 +- .../app/rest/PrimaryBitstreamController.java | 135 + .../app/rest/ScriptProcessesController.java | 35 +- .../rest/converter/BrowseIndexConverter.java | 30 +- .../BulkAccessConditionConverter.java | 77 + .../rest/converter/CollectionConverter.java | 8 + .../rest/converter/CommunityConverter.java | 8 + .../app/rest/converter/ConverterService.java | 11 +- .../rest/converter/SearchEventConverter.java | 29 + .../SubmissionAccessOptionConverter.java | 12 +- .../DSpaceApiExceptionControllerAdvice.java | 9 + 
.../RESTBitstreamNotFoundException.java | 51 + .../rest/link/BrowseEntryHalLinkFactory.java | 4 +- .../app/rest/model/BrowseIndexRest.java | 77 +- .../rest/model/BulkAccessConditionRest.java | 84 + .../dspace/app/rest/model/CollectionRest.java | 12 + .../dspace/app/rest/model/CommunityRest.java | 10 + .../rest/model/SearchConfigurationRest.java | 10 + .../app/rest/model/SearchEventRest.java | 9 + .../dspace/app/rest/model/ViewEventRest.java | 9 + .../model/hateoas/BrowseIndexResource.java | 34 +- .../hateoas/BulkAccessConditionResource.java | 27 + .../repository/BitstreamRestRepository.java | 28 + .../repository/BrowseEntryLinkRepository.java | 5 +- .../repository/BrowseIndexRestRepository.java | 36 +- .../repository/BrowseItemLinkRepository.java | 5 +- .../BulkAccessConditionRestRepository.java | 85 + .../BundlePrimaryBitstreamLinkRepository.java | 110 + .../CommunityCollectionLinkRepository.java | 10 + .../CommunitySubcommunityLinkRepository.java | 10 + .../rest/repository/DSpaceRestRepository.java | 6 +- .../MetadataFieldRestRepository.java | 51 +- .../MetadataSchemaRestRepository.java | 21 +- .../ProcessFileTypesLinkRepository.java | 2 +- .../RegistrationRestRepository.java | 1 - .../repository/RequestItemRepository.java | 18 +- .../rest/repository/ScriptRestRepository.java | 23 +- .../SubmissionUploadRestRepository.java | 17 +- .../repository/ViewEventRestRepository.java | 3 +- .../operation/BitstreamRemoveOperation.java | 79 + .../ResourcePolicyEndDateAddOperation.java | 11 +- ...ResourcePolicyEndDateReplaceOperation.java | 11 +- .../ResourcePolicyStartDateAddOperation.java | 11 +- ...sourcePolicyStartDateReplaceOperation.java | 11 +- .../resourcePolicy/ResourcePolicyUtils.java | 31 +- ...lateItemRestPermissionEvaluatorPlugin.java | 83 + .../security/WebSecurityConfiguration.java | 2 +- .../controller/LinksetRestController.java | 194 ++ .../converter/LinksetConverter.java | 39 + .../LinksetRestMessageConverter.java | 51 + .../signposting/hateoas/LinksetResource.java | 26 + .../app/rest/signposting/model/Linkset.java | 139 + .../rest/signposting/model/LinksetNode.java | 68 + .../signposting/model/LinksetRelation.java | 34 + .../model/LinksetRelationType.java | 37 + .../rest/signposting/model/LinksetRest.java | 61 + .../model/MetadataConfiguration.java | 52 + .../rest/signposting/model/TypedLinkRest.java | 74 + .../AbstractSignPostingProcessor.java | 50 + .../processor/SignPostingProcessor.java | 35 + .../bitstream/BitstreamLinksetProcessor.java | 66 + .../BitstreamParentItemProcessor.java | 56 + .../BitstreamSignpostingProcessor.java | 33 + .../bitstream/BitstreamTypeProcessor.java | 58 + .../processor/item/ItemAuthorProcessor.java | 85 + .../item/ItemContentBitstreamsProcessor.java | 61 + .../item/ItemDescribedbyProcessor.java | 50 + .../item/ItemIdentifierProcessor.java | 54 + .../processor/item/ItemLicenseProcessor.java | 51 + .../processor/item/ItemLinksetProcessor.java | 58 + .../item/ItemSignpostingProcessor.java | 34 + .../processor/item/ItemTypeProcessor.java | 61 + ...MetadataDescribesSignpostingProcessor.java | 43 + .../MetadataSignpostingProcessor.java | 30 + .../signposting/service/LinksetService.java | 50 + .../service/impl/LinksetServiceImpl.java | 153 + .../rest/signposting/utils/LinksetMapper.java | 56 + .../AccessConditionAddPatchOperation.java | 19 +- .../AccessConditionReplacePatchOperation.java | 18 +- .../app/rest/submit/step/DescribeStep.java | 11 +- .../app/rest/utils/ApplicationConfig.java | 53 +- .../java/org/dspace/app/rest/utils/Utils.java | 47 +- 
.../config/spring/api/test-discovery.xml | 2942 +++++++++++++++++ .../config/spring/rest/scripts.xml | 5 + .../BulkAccessControlScriptIT.java | 502 +++ .../dspace/app/itemimport/ItemImportIT.java | 6 + .../opensearch/OpenSearchControllerIT.java | 20 + .../rest/BitstreamFormatRestRepositoryIT.java | 2 +- .../app/rest/BitstreamRestControllerIT.java | 111 + .../app/rest/BitstreamRestRepositoryIT.java | 531 ++- .../app/rest/BrowsesResourceControllerIT.java | 525 ++- .../BulkAccessConditionRestRepositoryIT.java | 256 ++ .../app/rest/CollectionRestRepositoryIT.java | 3 +- ...CrossRefImportMetadataSourceServiceIT.java | 12 +- .../DiscoveryScopeBasedRestControllerIT.java | 677 ++++ .../rest/ItemTemplateRestControllerIT.java | 46 + .../rest/MetadataSchemaRestRepositoryIT.java | 67 +- .../rest/MetadatafieldRestRepositoryIT.java | 256 +- .../rest/PrimaryBitstreamControllerIT.java | 624 ++++ .../app/rest/ProcessRestRepositoryIT.java | 84 +- .../PubmedImportMetadataSourceServiceIT.java | 213 ++ .../rest/RegistrationRestRepositoryIT.java | 10 +- .../app/rest/RequestItemRepositoryIT.java | 100 +- .../app/rest/ScriptRestRepositoryIT.java | 185 +- .../app/rest/SearchEventRestRepositoryIT.java | 110 + .../SubmissionDefinitionsControllerIT.java | 1 - .../app/rest/SubmissionFormsControllerIT.java | 58 +- .../app/rest/ViewEventRestRepositoryIT.java | 55 + .../rest/WorkspaceItemRestRepositoryIT.java | 1 - .../eperson/DeleteEPersonSubmitterIT.java | 31 +- .../app/rest/matcher/BrowseIndexMatcher.java | 33 +- .../app/rest/matcher/FacetEntryMatcher.java | 11 + .../app/rest/matcher/FacetValueMatcher.java | 10 + .../CommunityCollectionLinkRepositoryIT.java | 82 + ...CommunitySubcommunityLinkRepositoryIT.java | 80 + .../controller/LinksetRestControllerIT.java | 990 ++++++ .../utils/RestDiscoverQueryBuilderTest.java | 12 + ...TypeConversionTestScriptConfiguration.java | 5 - .../org/dspace/curate/CurationScriptIT.java | 267 ++ ...MockDSpaceRunnableScriptConfiguration.java | 17 - .../app/rest/pubmedimport-fetch-test.xml | 14 + .../app/rest/pubmedimport-fetch-test2.xml | 14 + .../app/rest/pubmedimport-search-test.xml | 194 ++ .../app/rest/pubmedimport-search-test2.xml | 132 + .../servicemanager/DSpaceServiceManager.java | 19 +- .../java/org/dspace/utils/CallStackUtils.java | 44 + dspace-sword/pom.xml | 19 - dspace-swordv2/pom.xml | 32 - .../oai/metadataFormats/oai_openaire.xsl | 82 +- ...verter-dspace-to-schema-org-uri.properties | 23 + dspace/config/dspace.cfg | 66 +- dspace/config/emails/change_password | 12 +- dspace/config/emails/doi_maintenance_error | 4 +- dspace/config/emails/export_error | 7 +- dspace/config/emails/export_success | 6 +- dspace/config/emails/feedback | 3 +- dspace/config/emails/flowtask_notify | 4 +- dspace/config/emails/harvesting_error | 4 +- dspace/config/emails/healthcheck | 2 +- dspace/config/emails/internal_error | 2 +- dspace/config/emails/register | 10 +- dspace/config/emails/registration_notify | 4 +- dspace/config/emails/request_item.admin | 8 +- dspace/config/emails/request_item.author | 8 +- dspace/config/emails/request_item.granted | 26 + dspace/config/emails/request_item.rejected | 26 + dspace/config/emails/submit_archive | 6 +- dspace/config/emails/submit_reject | 7 +- dspace/config/emails/submit_task | 6 +- dspace/config/emails/subscriptions_content | 2 +- dspace/config/emails/welcome | 5 +- dspace/config/item-submission.dtd | 3 +- dspace/config/local.cfg.EXAMPLE | 18 +- dspace/config/modules/rest.cfg | 4 + dspace/config/modules/signposting.cfg | 35 + 
.../config/registries/bitstream-formats.xml | 38 +- .../config/spring/api/access-conditions.xml | 30 + .../config/spring/api/arxiv-integration.xml | 4 +- dspace/config/spring/api/core-services.xml | 6 +- .../spring/api/crossref-integration.xml | 14 +- dspace/config/spring/api/discovery.xml | 9 +- .../config/spring/api/identifier-service.xml | 2 - .../config/spring/api/pubmed-integration.xml | 1 + dspace/config/spring/api/requestitem.xml | 46 +- dspace/config/spring/api/scripts.xml | 11 +- dspace/config/spring/oai/oai.xml | 3 + dspace/config/spring/rest/scripts.xml | 11 +- dspace/config/spring/rest/signposting.xml | 50 + dspace/modules/additions/pom.xml | 18 +- dspace/modules/rest/pom.xml | 18 - dspace/modules/server/pom.xml | 16 - dspace/solr/search/conf/schema.xml | 8 +- dspace/src/main/docker-compose/README.md | 161 +- .../src/main/docker-compose/db.entities.yml | 2 +- dspace/src/main/docker-compose/db.restore.yml | 26 + dspace/src/main/docker/README.md | 91 +- .../dspace-postgres-pgcrypto-curl/Dockerfile | 17 +- .../install-pgcrypto.sh | 22 +- .../dspace-postgres-pgcrypto/Dockerfile | 15 +- dspace/src/main/docker/dspace-solr/Dockerfile | 36 + pom.xml | 37 +- 442 files changed, 19935 insertions(+), 7854 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java create mode 100644 dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java create mode 100644 dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java create mode 100644 dspace-api/src/main/java/org/dspace/authorize/package-info.java delete mode 100644 dspace-api/src/main/java/org/dspace/authorize/package.html create mode 100644 dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java create mode 100644 dspace-api/src/main/java/org/dspace/event/package-info.java create mode 100644 dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java create mode 100644 dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java create mode 100644 dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java create mode 100644 dspace-api/src/main/java/org/dspace/util/TimeHelpers.java delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql rename dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/{oracle/V7.0_2021.02.08__tilted_rels.sql => 
h2/V7.6_2023.03.17__Remove_unused_sequence.sql} (77%) rename dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/{oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql => h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql} (60%) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql delete mode 100644 
dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql delete mode 100644 
dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql rename dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/{oracle/V7.0_2021.01.22__Remove_basic_workflow.sql => postgres/V7.6_2023.03.17__Remove_unused_sequence.sql} (65%) rename dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/{oracle/V7.5_2022.12.15__system_wide_alerts.sql => postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql} (57%) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml create mode 100644 dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java create mode 100644 dspace-api/src/test/java/org/dspace/app/requestitem/JavaMailTestTransport.java create mode 100644 dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java create mode 100644 dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java create mode 100644 dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java create mode 100644 dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf create mode 100644 dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 create mode 100644 dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/PrimaryBitstreamController.java create mode 100644 
dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BulkAccessConditionConverter.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/model/BulkAccessConditionRest.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BulkAccessConditionResource.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BulkAccessConditionRestRepository.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/controller/LinksetRestController.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetConverter.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/converter/LinksetRestMessageConverter.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/hateoas/LinksetResource.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/Linkset.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetNode.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelation.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRelationType.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/LinksetRest.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/MetadataConfiguration.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/model/TypedLinkRest.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/AbstractSignPostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/SignPostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamSignpostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemIdentifierProcessor.java create mode 100644 
dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemSignpostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataDescribesSignpostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/metadata/MetadataSignpostingProcessor.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/LinksetService.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/utils/LinksetMapper.java create mode 100644 dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/BulkAccessConditionRestRepositoryIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml create mode 100644 dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java create mode 100644 dspace/config/crosswalks/signposting/mapConverter-dspace-to-schema-org-uri.properties create mode 100644 dspace/config/emails/request_item.granted create mode 100644 dspace/config/emails/request_item.rejected create mode 100644 dspace/config/modules/signposting.cfg create mode 100644 dspace/config/spring/rest/signposting.xml create mode 100644 dspace/src/main/docker-compose/db.restore.yml create mode 100644 dspace/src/main/docker/dspace-solr/Dockerfile diff --git a/.codecov.yml b/.codecov.yml index a628d33cbec5..326dd3e0b29e 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -4,13 +4,6 @@ # Can be validated via instructions at: # https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml -# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed -# Since we run Unit & Integration 
tests in parallel, this lets Codecov know that coverage
-# needs to be merged across those builds
-codecov:
-  notify:
-    after_n_builds: 2
-
 # Settings related to code coverage analysis
 coverage:
   status:
diff --git a/.dockerignore b/.dockerignore
index 0e42960dc9c0..7d3bdc2b4b0d 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -6,6 +6,5 @@ dspace/modules/*/target/
 Dockerfile.*
 dspace/src/main/docker/dspace-postgres-pgcrypto
 dspace/src/main/docker/dspace-postgres-pgcrypto-curl
-dspace/src/main/docker/solr
 dspace/src/main/docker/README.md
 dspace/src/main/docker-compose/
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index dcb98747ba1e..4006656354af 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -88,6 +88,39 @@ jobs:
           name: ${{ matrix.type }} results
           path: ${{ matrix.resultsdir }}

-      # https://github.com/codecov/codecov-action
+      # Upload code coverage report to artifact, so that it can be shared with the 'codecov' job (see below)
+      - name: Upload code coverage report to Artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.type }} coverage report
+          path: 'dspace/target/site/jacoco-aggregate/jacoco.xml'
+          retention-days: 14
+
+  # Codecov upload is a separate job in order to allow us to restart this separate from the entire build/test
+  # job above. This is necessary because Codecov uploads seem to randomly fail at times.
+  # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954
+  codecov:
+    # Must run after 'tests' job above
+    needs: tests
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      # Download artifacts from previous 'tests' job
+      - name: Download coverage artifacts
+        uses: actions/download-artifact@v3
+
+      # Now attempt upload to Codecov using its action.
+      # NOTE: We use a retry action to retry the Codecov upload if it fails the first time.
+      #
+      # Retry action: https://github.com/marketplace/actions/retry-action
+      # Codecov action: https://github.com/codecov/codecov-action
       - name: Upload coverage to Codecov.io
-        uses: codecov/codecov-action@v3
+        uses: Wandalen/wretry.action@v1.0.36
+        with:
+          action: codecov/codecov-action@v3
+          # Try upload 5 times max
+          attempt_limit: 5
+          # Run again in 30 seconds
+          attempt_delay: 30000
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 64e12f01aac0..971954a5e1ee 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -170,3 +170,86 @@ jobs:
           # Use tags / labels provided by 'docker/metadata-action' above
           tags: ${{ steps.meta_build_cli.outputs.tags }}
           labels: ${{ steps.meta_build_cli.outputs.labels }}
+
+      ###########################################
+      # Build/Push the 'dspace/dspace-solr' image
+      ###########################################
+      # Get Metadata for docker_build_solr step below
+      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image
+        id: meta_build_solr
+        uses: docker/metadata-action@v4
+        with:
+          images: dspace/dspace-solr
+          tags: ${{ env.IMAGE_TAGS }}
+          flavor: ${{ env.TAGS_FLAVOR }}
+
+      - name: Build and push 'dspace-solr' image
+        id: docker_build_solr
+        uses: docker/build-push-action@v3
+        with:
+          context: .
+          file: ./dspace/src/main/docker/dspace-solr/Dockerfile
+          platforms: ${{ env.PLATFORMS }}
+          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
+          # but we ONLY do an image push to DockerHub if it's NOT a PR
+          push: ${{ github.event_name != 'pull_request' }}
+          # Use tags / labels provided by 'docker/metadata-action' above
+          tags: ${{ steps.meta_build_solr.outputs.tags }}
+          labels: ${{ steps.meta_build_solr.outputs.labels }}
+
+      ###########################################################
+      # Build/Push the 'dspace/dspace-postgres-pgcrypto' image
+      ###########################################################
+      # Get Metadata for docker_build_postgres step below
+      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image
+        id: meta_build_postgres
+        uses: docker/metadata-action@v4
+        with:
+          images: dspace/dspace-postgres-pgcrypto
+          tags: ${{ env.IMAGE_TAGS }}
+          flavor: ${{ env.TAGS_FLAVOR }}
+
+      - name: Build and push 'dspace-postgres-pgcrypto' image
+        id: docker_build_postgres
+        uses: docker/build-push-action@v3
+        with:
+          # Must build out of subdirectory to have access to install script for pgcrypto
+          context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
+          dockerfile: Dockerfile
+          platforms: ${{ env.PLATFORMS }}
+          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
+          # but we ONLY do an image push to DockerHub if it's NOT a PR
+          push: ${{ github.event_name != 'pull_request' }}
+          # Use tags / labels provided by 'docker/metadata-action' above
+          tags: ${{ steps.meta_build_postgres.outputs.tags }}
+          labels: ${{ steps.meta_build_postgres.outputs.labels }}
+
+      ###########################################################
+      # Build/Push the 'dspace/dspace-postgres-pgcrypto' image ('-loadsql' tag)
+      ###########################################################
+      # Get Metadata for docker_build_postgres_loadsql step below
+      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image
+        id: meta_build_postgres_loadsql
+        uses: docker/metadata-action@v4
+        with:
+          images: dspace/dspace-postgres-pgcrypto
+          tags: ${{ env.IMAGE_TAGS }}
+          # Suffix all tags with "-loadsql". Otherwise, it uses the same
+          # tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above.
+          flavor: ${{ env.TAGS_FLAVOR }}
+            suffix=-loadsql
+
+      - name: Build and push 'dspace-postgres-pgcrypto-loadsql' image
+        id: docker_build_postgres_loadsql
+        uses: docker/build-push-action@v3
+        with:
+          # Must build out of subdirectory to have access to install script for pgcrypto
+          context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/
+          dockerfile: Dockerfile
+          platforms: ${{ env.PLATFORMS }}
+          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
+          # but we ONLY do an image push to DockerHub if it's NOT a PR
+          push: ${{ github.event_name != 'pull_request' }}
+          # Use tags / labels provided by 'docker/metadata-action' above
+          tags: ${{ steps.meta_build_postgres_loadsql.outputs.tags }}
+          labels: ${{ steps.meta_build_postgres_loadsql.outputs.labels }}
\ No newline at end of file
diff --git a/.github/workflows/issue_opened.yml b/.github/workflows/issue_opened.yml
index 5d7c1c30f7d3..b4436dca3aad 100644
--- a/.github/workflows/issue_opened.yml
+++ b/.github/workflows/issue_opened.yml
@@ -16,7 +16,7 @@ jobs:
         # Only add to project board if issue is flagged as "needs triage" or has no labels
         # NOTE: By default we flag new issues as "needs triage" in our issue template
         if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '')
-        uses: actions/add-to-project@v0.3.0
+        uses: actions/add-to-project@v0.5.0
         # Note, the authentication token below is an ORG level Secret.
         # It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions
         # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token
diff --git a/.github/workflows/label_merge_conflicts.yml b/.github/workflows/label_merge_conflicts.yml
index d71d244c2b02..cc0c7099f40e 100644
--- a/.github/workflows/label_merge_conflicts.yml
+++ b/.github/workflows/label_merge_conflicts.yml
@@ -23,7 +23,7 @@ jobs:
     steps:
       # See: https://github.com/prince-chrismc/label-merge-conflicts-action
       - name: Auto-label PRs with merge conflicts
-        uses: prince-chrismc/label-merge-conflicts-action@v2
+        uses: prince-chrismc/label-merge-conflicts-action@v3
         # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
# Note, the authentication token is created automatically # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token diff --git a/Dockerfile b/Dockerfile index 444a1bcf0b55..f1ff6adf5ac5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,7 +31,7 @@ ARG TARGET_DIR=dspace-installer COPY --from=build /install /dspace-src WORKDIR /dspace-src # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.12 +ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH # Need wget to install ant diff --git a/Dockerfile.cli b/Dockerfile.cli index 76e559fc83c3..62e83b79ef02 100644 --- a/Dockerfile.cli +++ b/Dockerfile.cli @@ -30,12 +30,12 @@ ARG TARGET_DIR=dspace-installer COPY --from=build /install /dspace-src WORKDIR /dspace-src # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.12 +ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH -# Need wget to install ant +# Need wget to install ant, and unzip for managing AIPs RUN apt-get update \ - && apt-get install -y --no-install-recommends wget \ + && apt-get install -y --no-install-recommends wget unzip \ && apt-get purge -y --auto-remove \ && rm -rf /var/lib/apt/lists/* # Download and install 'ant' diff --git a/LICENSES_THIRD_PARTY b/LICENSES_THIRD_PARTY index b96ea77648a6..e494c80c5d6e 100644 --- a/LICENSES_THIRD_PARTY +++ b/LICENSES_THIRD_PARTY @@ -26,7 +26,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava) * JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava) * HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc) - * com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.16.0 - https://drewnoakes.com/code/exif/) + * com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.18.0 - https://drewnoakes.com/code/exif/) * parso (com.epam:parso:2.0.14 - https://github.com/epam/parso) * Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java) * ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate) @@ -34,12 +34,12 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Jackson-core (com.fasterxml.jackson.core:jackson-core:2.13.4 - https://github.com/FasterXML/jackson-core) * jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.2 - http://github.com/FasterXML/jackson) * Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary) - * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary) + * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.13.3 - http://github.com/FasterXML/jackson-dataformats-binary) * Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text) - * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) + * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.5 - 
https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) - * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) - * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) * Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator) * Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox) * zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/) @@ -56,19 +56,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics) * FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/) * Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson) - * error-prone annotations (com.google.errorprone:error_prone_annotations:2.7.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) + * error-prone annotations (com.google.errorprone:error_prone_annotations:2.18.0 - https://errorprone.info/error_prone_annotations) * Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess) - * Guava: Google Core Libraries for Java (com.google.guava:guava:31.0.1-jre - https://github.com/google/guava) + * Guava: Google Core Libraries for Java (com.google.guava:guava:32.0.0-jre - https://github.com/google/guava) * Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5) * Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture) * Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client) * GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson) * Jackson 2 extensions to the Google HTTP Client Library for Java. 
(com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2) - * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/) + * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:2.8 - https://github.com/google/j2objc/) * Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client) * ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap) * libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/) - * Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.1 - https://jackcess.sourceforge.io) + * Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.2 - https://jackcess.sourceforge.io) * Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net) * project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath) * project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath) @@ -79,11 +79,20 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt) * opencsv (com.opencsv:opencsv:5.6 - http://opencsv.sf.net) * java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst) - * rome (com.rometools:rome:1.18.0 - http://rometools.com/rome) - * rome-modules (com.rometools:rome-modules:1.18.0 - http://rometools.com/rome-modules) - * rome-utils (com.rometools:rome-utils:1.18.0 - http://rometools.com/rome-utils) + * rome (com.rometools:rome:1.19.0 - http://rometools.com/rome) + * rome-modules (com.rometools:rome-modules:1.19.0 - http://rometools.com/rome-modules) + * rome-utils (com.rometools:rome-utils:1.19.0 - http://rometools.com/rome-utils) * fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net) * T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest) + * config (com.typesafe:config:1.3.3 - https://github.com/lightbend/config) + * ssl-config-core (com.typesafe:ssl-config-core_2.13:0.3.8 - https://github.com/lightbend/ssl-config) + * akka-actor (com.typesafe.akka:akka-actor_2.13:2.5.31 - https://akka.io/) + * akka-http-core (com.typesafe.akka:akka-http-core_2.13:10.1.12 - https://akka.io) + * akka-http (com.typesafe.akka:akka-http_2.13:10.1.12 - https://akka.io) + * akka-parsing (com.typesafe.akka:akka-parsing_2.13:10.1.12 - https://akka.io) + * akka-protobuf (com.typesafe.akka:akka-protobuf_2.13:2.5.31 - https://akka.io/) + * akka-stream (com.typesafe.akka:akka-stream_2.13:2.5.31 - https://akka.io/) + * scala-logging (com.typesafe.scala-logging:scala-logging_2.13:3.9.2 - https://github.com/lightbend/scala-logging) * JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk) * SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet) * Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/) @@ -91,20 +100,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache Commons Codec (commons-codec:commons-codec:1.10 - 
http://commons.apache.org/proper/commons-codec/) * Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/) * Commons Digester (commons-digester:commons-digester:1.8.1 - http://commons.apache.org/digester/) - * Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.3 - http://commons.apache.org/proper/commons-fileupload/) + * Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.5 - https://commons.apache.org/proper/commons-fileupload/) * Apache Commons IO (commons-io:commons-io:2.7 - https://commons.apache.org/proper/commons-io/) * Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/) * Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/) * Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/) * GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson) - * Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/) * OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu) * Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core) * Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite) * Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9) * Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx) * JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm) - * micrometer-core (io.micrometer:micrometer-core:1.8.6 - https://github.com/micrometer-metrics/micrometer) + * micrometer-core (io.micrometer:micrometer-core:1.9.11 - https://github.com/micrometer-metrics/micrometer) * Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/) * Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/) * Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/) @@ -188,88 +196,87 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util) * Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1) * Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix) - * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/) - * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/) - * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/) - * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/) - * Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.16.0 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/) - * Apache Log4j SLF4J Binding 
(org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) - * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/) - * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) - * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) - * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) - * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) - * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) - * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) - * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) - * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) - * Lucene Classification (org.apache.lucene:lucene-classification:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-classification) - * Lucene codecs (org.apache.lucene:lucene-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-codecs) - * Lucene Core (org.apache.lucene:lucene-core:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-core) - * Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-expressions) - * Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-grouping) - * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter) - * Lucene Join (org.apache.lucene:lucene-join:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-join) - * Lucene Memory (org.apache.lucene:lucene-memory:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-memory) - * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-misc) - * Lucene Queries (org.apache.lucene:lucene-queries:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queries) - * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser) - * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox) - * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) - * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) - * Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest) - * Apache FontBox (org.apache.pdfbox:fontbox:2.0.27 - http://pdfbox.apache.org/) - * PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/) - * Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - 
http://www.apache.org/pdfbox-parent/jempbox/) - * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox/) - * Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/) - * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) - * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/) - * Apache POI - Common (org.apache.poi:poi:5.2.0 - https://poi.apache.org/) - * Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.0 - https://poi.apache.org/) - * Apache POI (org.apache.poi:poi-ooxml-lite:5.2.0 - https://poi.apache.org/) - * Apache POI (org.apache.poi:poi-scratchpad:5.2.0 - https://poi.apache.org/) - * Apache Solr Core (org.apache.solr:solr-core:8.11.1 - https://lucene.apache.org/solr-parent/solr-core) - * Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.1 - https://lucene.apache.org/solr-parent/solr-solrj) + * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/) + * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-api/) + * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-core/) + * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-jul/) + * Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/) + * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) + * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-web/) + * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) + * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) + * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) + * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) + * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) + * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) + * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) + * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) + * Lucene Classification (org.apache.lucene:lucene-classification:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-classification) + * Lucene codecs (org.apache.lucene:lucene-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-codecs) + * Lucene Core (org.apache.lucene:lucene-core:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-core) + * Lucene 
Expressions (org.apache.lucene:lucene-expressions:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-expressions) + * Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-grouping) + * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-highlighter) + * Lucene Join (org.apache.lucene:lucene-join:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-join) + * Lucene Memory (org.apache.lucene:lucene-memory:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-memory) + * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-misc) + * Lucene Queries (org.apache.lucene:lucene-queries:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queries) + * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queryparser) + * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-sandbox) + * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) + * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) + * Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-suggest) + * Apache FontBox (org.apache.pdfbox:fontbox:2.0.28 - http://pdfbox.apache.org/) + * PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.4 - https://www.apache.org/jbig2-imageio/) + * Apache JempBox (org.apache.pdfbox:jempbox:1.8.17 - http://www.apache.org/pdfbox-parent/jempbox/) + * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.28 - https://www.apache.org/pdfbox-parent/pdfbox/) + * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) + * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.27 - https://www.apache.org/pdfbox-parent/xmpbox/) + * Apache POI - Common (org.apache.poi:poi:5.2.3 - https://poi.apache.org/) + * Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.3 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-ooxml-lite:5.2.3 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-scratchpad:5.2.3 - https://poi.apache.org/) + * Apache Solr Core (org.apache.solr:solr-core:8.11.2 - https://lucene.apache.org/solr-parent/solr-core) + * Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.2 - https://lucene.apache.org/solr-parent/solr-solrj) * Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl) * Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec) * Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org) - * Apache Tika core (org.apache.tika:tika-core:2.3.0 - https://tika.apache.org/) - * Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.3.0 - https://tika.apache.org/tika-parser-apple-module/) - * Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.3.0 - https://tika.apache.org/tika-parser-audiovideo-module/) - * Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.3.0 - https://tika.apache.org/tika-parser-cad-module/) - * Apache Tika 
code parser module (org.apache.tika:tika-parser-code-module:2.3.0 - https://tika.apache.org/tika-parser-code-module/) - * Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.3.0 - https://tika.apache.org/tika-parser-crypto-module/) - * Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.3.0 - https://tika.apache.org/tika-parser-digest-commons/) - * Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.3.0 - https://tika.apache.org/tika-parser-font-module/) - * Apache Tika html commons (org.apache.tika:tika-parser-html-commons:2.3.0 - https://tika.apache.org/tika-parser-html-commons/) - * Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.3.0 - https://tika.apache.org/tika-parser-html-module/) - * Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.3.0 - https://tika.apache.org/tika-parser-image-module/) - * Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.3.0 - https://tika.apache.org/tika-parser-mail-commons/) - * Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.3.0 - https://tika.apache.org/tika-parser-mail-module/) - * Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.3.0 - https://tika.apache.org/tika-parser-microsoft-module/) - * Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.3.0 - https://tika.apache.org/tika-parser-miscoffice-module/) - * Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.3.0 - https://tika.apache.org/tika-parser-news-module/) - * Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.3.0 - https://tika.apache.org/tika-parser-ocr-module/) - * Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.3.0 - https://tika.apache.org/tika-parser-pdf-module/) - * Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.3.0 - https://tika.apache.org/tika-parser-pkg-module/) - * Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.3.0 - https://tika.apache.org/tika-parser-text-module/) - * Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.3.0 - https://tika.apache.org/tika-parser-xml-module/) - * Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.3.0 - https://tika.apache.org/tika-parser-xmp-commons/) - * Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.3.0 - https://tika.apache.org/tika-parser-zip-commons/) - * Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.3.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/) - * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.63 - https://tomcat.apache.org/) - * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.63 - https://tomcat.apache.org/) - * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.63 - https://tomcat.apache.org/) + * Apache Tika core (org.apache.tika:tika-core:2.5.0 - https://tika.apache.org/) + * Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.5.0 - https://tika.apache.org/tika-parser-apple-module/) + * Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.5.0 - https://tika.apache.org/tika-parser-audiovideo-module/) + * Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.5.0 - 
https://tika.apache.org/tika-parser-cad-module/) + * Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.5.0 - https://tika.apache.org/tika-parser-code-module/) + * Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.5.0 - https://tika.apache.org/tika-parser-crypto-module/) + * Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.5.0 - https://tika.apache.org/tika-parser-digest-commons/) + * Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.5.0 - https://tika.apache.org/tika-parser-font-module/) + * Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.5.0 - https://tika.apache.org/tika-parser-html-module/) + * Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.5.0 - https://tika.apache.org/tika-parser-image-module/) + * Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.5.0 - https://tika.apache.org/tika-parser-mail-commons/) + * Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.5.0 - https://tika.apache.org/tika-parser-mail-module/) + * Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.5.0 - https://tika.apache.org/tika-parser-microsoft-module/) + * Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.5.0 - https://tika.apache.org/tika-parser-miscoffice-module/) + * Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.5.0 - https://tika.apache.org/tika-parser-news-module/) + * Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.5.0 - https://tika.apache.org/tika-parser-ocr-module/) + * Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.5.0 - https://tika.apache.org/tika-parser-pdf-module/) + * Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.5.0 - https://tika.apache.org/tika-parser-pkg-module/) + * Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.5.0 - https://tika.apache.org/tika-parser-text-module/) + * Apache Tika WARC parser module (org.apache.tika:tika-parser-webarchive-module:2.5.0 - https://tika.apache.org/tika-parser-webarchive-module/) + * Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.5.0 - https://tika.apache.org/tika-parser-xml-module/) + * Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.5.0 - https://tika.apache.org/tika-parser-xmp-commons/) + * Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.5.0 - https://tika.apache.org/tika-parser-zip-commons/) + * Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.5.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/) + * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.75 - https://tomcat.apache.org/) + * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.75 - https://tomcat.apache.org/) + * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.75 - https://tomcat.apache.org/) * Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/) * Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/) * Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/) * Abdera Model (FOM) 
Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/) - * XmlBeans (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/) + * XmlBeans (org.apache.xmlbeans:xmlbeans:5.1.1 - https://xmlbeans.apache.org/) * Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper) * Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute) * org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian) - * AssertJ fluent assertions (org.assertj:assertj-core:3.21.0 - https://assertj.github.io/doc/assertj-core/) + * AssertJ fluent assertions (org.assertj:assertj-core:3.22.0 - https://assertj.github.io/doc/assertj-core/) * Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector) * jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/) * TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/) @@ -279,34 +286,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility 
(org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io) * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml) * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common) * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack) * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - 
https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org) * flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core) @@ -315,8 +322,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.23.Final - http://hibernate.org/validator/hibernate-validator) - * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.23.Final - http://hibernate.org/validator/hibernate-validator-cdi) + * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.2.5.Final - http://hibernate.org/validator/hibernate-validator) + * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.2.5.Final - http://hibernate.org/validator/hibernate-validator-cdi) + * leveldb (org.iq80.leveldb:leveldb:0.12 - http://github.com/dain/leveldb/leveldb) + * leveldb-api (org.iq80.leveldb:leveldb-api:0.12 - http://github.com/dain/leveldb/leveldb-api) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) * Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex) * JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org) @@ -337,59 +346,67 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester) * Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util) * Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api) + * jwarc (org.netpreserve:jwarc:0.19.0 - https://github.com/iipc/jwarc) * Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis) * parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org) * parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org) * RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/) - * JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert) - * Spring AOP (org.springframework:spring-aop:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Beans (org.springframework:spring-beans:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Context (org.springframework:spring-context:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Context Support (org.springframework:spring-context-support:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Core (org.springframework:spring-core:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Expression Language (SpEL) 
(org.springframework:spring-expression:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring JDBC (org.springframework:spring-jdbc:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring TestContext Framework (org.springframework:spring-test:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Transaction (org.springframework:spring-tx:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Web (org.springframework:spring-web:5.3.20 - https://github.com/spring-projects/spring-framework) - * Spring Web MVC (org.springframework:spring-webmvc:5.3.20 - https://github.com/spring-projects/spring-framework) - * spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) + * Scala Library (org.scala-lang:scala-library:2.13.9 - https://www.scala-lang.org/) + * Scala Compiler (org.scala-lang:scala-reflect:2.13.0 - https://www.scala-lang.org/) + * scala-collection-compat (org.scala-lang.modules:scala-collection-compat_2.13:2.1.6 - http://www.scala-lang.org/) + * scala-java8-compat (org.scala-lang.modules:scala-java8-compat_2.13:0.9.0 - http://www.scala-lang.org/) + * scala-parser-combinators (org.scala-lang.modules:scala-parser-combinators_2.13:1.1.2 - http://www.scala-lang.org/) + * scala-xml (org.scala-lang.modules:scala-xml_2.13:1.3.0 - http://www.scala-lang.org/) + * JSONassert (org.skyscreamer:jsonassert:1.5.1 - https://github.com/skyscreamer/JSONassert) + * JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.36 - http://www.slf4j.org) + * Spring AOP (org.springframework:spring-aop:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Beans (org.springframework:spring-beans:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Context (org.springframework:spring-context:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Context Support (org.springframework:spring-context-support:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Core (org.springframework:spring-core:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring JDBC (org.springframework:spring-jdbc:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring TestContext Framework (org.springframework:spring-test:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Transaction (org.springframework:spring-tx:5.3.27 - 
https://github.com/spring-projects/spring-framework) + * Spring Web (org.springframework:spring-web:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Web MVC (org.springframework:spring-webmvc:5.3.27 - https://github.com/spring-projects/spring-framework) + * spring-boot (org.springframework.boot:spring-boot:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) * Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor) - * spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-test (org.springframework.boot:spring-boot-test:2.6.8 - https://spring.io/projects/spring-boot) - * spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) - * Spring Data Core (org.springframework.data:spring-data-commons:2.6.4 - https://www.spring.io/spring-data/spring-data-commons) - * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) - * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) - * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.4.2 - https://github.com/spring-projects/spring-hateoas) + * spring-boot-starter (org.springframework.boot:spring-boot-starter:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.7.12 - https://spring.io/projects/spring-boot) + * 
spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-test (org.springframework.boot:spring-boot-test:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) + * Spring Data Core (org.springframework.data:spring-data-commons:2.7.12 - https://www.spring.io/spring-data/spring-data-commons) + * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) + * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) + * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.5.4 - https://github.com/spring-projects/spring-hateoas) * Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core) - * spring-security-config (org.springframework.security:spring-security-config:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-core (org.springframework.security:spring-security-core:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-crypto (org.springframework.security:spring-security-crypto:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-test (org.springframework.security:spring-security-test:5.6.5 - https://spring.io/projects/spring-security) - * spring-security-web (org.springframework.security:spring-security-web:5.6.5 - https://spring.io/projects/spring-security) + * spring-security-config (org.springframework.security:spring-security-config:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-core (org.springframework.security:spring-security-core:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-crypto (org.springframework.security:spring-security-crypto:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-test (org.springframework.security:spring-security-test:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-web (org.springframework.security:spring-security-web:5.7.8 - https://spring.io/projects/spring-security) * SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - 
http://www.swordapp.org/) * snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java) * xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/) * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/) - * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.4 - https://www.xmlunit.org/) + * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.9.1 - https://www.xmlunit.org/) * org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/) - * SnakeYAML (org.yaml:snakeyaml:1.29 - http://www.snakeyaml.org) + * SnakeYAML (org.yaml:snakeyaml:1.30 - https://bitbucket.org/snakeyaml/snakeyaml) * software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/) * Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/) * xalan (xalan:xalan:2.7.0 - no url defined) @@ -404,7 +421,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security) * Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core) * JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/) - * curvesapi (com.github.virtuald:curvesapi:1.06 - https://github.com/virtuald/curvesapi) + * curvesapi (com.github.virtuald:curvesapi:1.07 - https://github.com/virtuald/curvesapi) * Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/) * JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/) * dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org) @@ -426,11 +443,15 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/) * asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/) * asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/) - * PostgreSQL JDBC Driver (org.postgresql:postgresql:42.4.3 - https://jdbc.postgresql.org) + * PostgreSQL JDBC Driver (org.postgresql:postgresql:42.6.0 - https://jdbc.postgresql.org) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio) * XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/) + CC0: + + * reactive-streams (org.reactivestreams:reactive-streams:1.0.2 - http://www.reactive-streams.org/) + Common Development and Distribution License (CDDL): * istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/) @@ -446,7 +467,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net) * javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net) * jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api) - * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) + * JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * 
ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) * HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils) @@ -489,34 +510,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io) * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security) - * Jetty :: 
Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml) * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common) * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack) * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) @@ -542,10 +563,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple) * uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template) * FindBugs-Annotations 
(com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/) - * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) - * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.5.Final - https://hibernate.org/orm) - * Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.5.Final - https://hibernate.org/orm) - * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.5.Final - https://hibernate.org/orm) + * JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight) + * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.15.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.15.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.15.Final - https://hibernate.org/orm) * Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org) * im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) @@ -562,9 +583,11 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines MIT License: + * better-files (com.github.pathikrit:better-files_2.13:3.9.1 - https://github.com/pathikrit/better-files) * Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver) - * dd-plist (com.googlecode.plist:dd-plist:1.23 - http://www.github.com/3breadt/dd-plist) + * dd-plist (com.googlecode.plist:dd-plist:1.25 - http://www.github.com/3breadt/dd-plist) * DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis) + * s3mock (io.findify:s3mock_2.13:0.2.6 - https://github.com/findify/s3mock) * JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple) * Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html) * Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html) @@ -572,15 +595,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html) * org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec) * Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org) - * Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org) + * Checker Qual (org.checkerframework:checker-qual:3.31.0 - https://checkerframework.org) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito) * mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito) * ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model) - * JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.25 - http://www.slf4j.org) - * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org) - * SLF4J API Module (org.slf4j:slf4j-api:1.7.25 
- http://www.slf4j.org) + * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.36 - http://www.slf4j.org) + * SLF4J API Module (org.slf4j:slf4j-api:1.7.36 - http://www.slf4j.org) * SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org) * HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org) * toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org) @@ -589,7 +611,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org) * urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org) * bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org) - * core-js (org.webjars.npm:core-js:3.28.0 - https://www.webjars.org) + * core-js (org.webjars.npm:core-js:3.30.1 - https://www.webjars.org) * @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org) Mozilla Public License: @@ -606,17 +628,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/) + * JSON in Java (org.json:json:20230227 - https://github.com/douglascrockford/JSON-java) * LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html) - The JSON License: - - * JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java) - UnRar License: - * Java Unrar (com.github.junrar:junrar:7.4.1 - https://github.com/junrar/junrar) + * Java Unrar (com.github.junrar:junrar:7.5.3 - https://github.com/junrar/junrar) Unicode/ICU License: diff --git a/docker-compose.yml b/docker-compose.yml index 6008b873ae5f..36ba6af2c981 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -62,13 +62,17 @@ services: while (! /dev/null 2>&1; do sleep 1; done; /dspace/bin/dspace database migrate catalina.sh run - # DSpace database container + # DSpace PostgreSQL database container dspacedb: container_name: dspacedb + # Uses a custom Postgres image with pgcrypto installed + image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-dspace-7_x}" + build: + # Must build out of subdirectory to have access to install script for pgcrypto + context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ environment: PGDATA: /pgdata - # Uses a custom Postgres image with pgcrypto installed - image: dspace/dspace-postgres-pgcrypto + POSTGRES_PASSWORD: dspace networks: dspacenet: ports: @@ -77,12 +81,17 @@ services: stdin_open: true tty: true volumes: + # Keep Postgres data directory between reboots - pgdata:/pgdata # DSpace Solr container dspacesolr: container_name: dspacesolr - # Uses official Solr image at https://hub.docker.com/_/solr/ - image: solr:8.11-slim + image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-dspace-7_x}" + build: + context: . 
+ dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile + args: + SOLR_VERSION: "${SOLR_VER:-8.11}" networks: dspacenet: ports: @@ -92,30 +101,25 @@ services: tty: true working_dir: /var/solr/data volumes: - # Mount our local Solr core configs so that they are available as Solr configsets on container - - ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority - - ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai - - ./dspace/solr/search:/opt/solr/server/solr/configsets/search - - ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics # Keep Solr data directory between reboots - solr_data:/var/solr/data - # Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr + # Initialize all DSpace Solr cores then start Solr: # * First, run precreate-core to create the core (if it doesn't yet exist). If exists already, this is a no-op - # * Second, copy updated configs from mounted configsets to this core. If it already existed, this updates core - # to the latest configs. If it's a newly created core, this is a no-op. + # * Second, copy configsets to this core: + # Updates to Solr configs require the container to be rebuilt/restarted: `docker compose -p d7 up -d --build dspacesolr` entrypoint: - /bin/bash - '-c' - | init-var-solr precreate-core authority /opt/solr/server/solr/configsets/authority - cp -r -u /opt/solr/server/solr/configsets/authority/* authority + cp -r /opt/solr/server/solr/configsets/authority/* authority precreate-core oai /opt/solr/server/solr/configsets/oai - cp -r -u /opt/solr/server/solr/configsets/oai/* oai + cp -r /opt/solr/server/solr/configsets/oai/* oai precreate-core search /opt/solr/server/solr/configsets/search - cp -r -u /opt/solr/server/solr/configsets/search/* search + cp -r /opt/solr/server/solr/configsets/search/* search precreate-core statistics /opt/solr/server/solr/configsets/statistics - cp -r -u /opt/solr/server/solr/configsets/statistics/* statistics + cp -r /opt/solr/server/solr/configsets/statistics/* statistics exec solr -f volumes: assetstore: diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index 0dbd31b4ed47..d12bc2ffeb60 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -784,7 +784,7 @@ org.json json - 20180130 + 20230227 @@ -896,7 +896,7 @@ - + io.findify s3mock_2.13 @@ -913,7 +913,7 @@ - + @@ -980,7 +980,8 @@ org.scala-lang scala-library - 2.13.2 + 2.13.9 + test diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java index 8d189038d9d1..91dcfb5dfec5 100644 --- a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.administer; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script. 
*/ public class ProcessCleanerConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java new file mode 100644 index 000000000000..50e1022dbe37 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -0,0 +1,689 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static org.apache.commons.collections4.CollectionUtils.isEmpty; +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED; +import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME; + +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.TimeZone; +import java.util.UUID; +import java.util.function.Function; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.bulkaccesscontrol.exception.BulkAccessControlException; +import org.dspace.app.bulkaccesscontrol.model.AccessCondition; +import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream; +import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput; +import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService; +import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory; +import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.app.util.DSpaceObjectUtilsImpl; +import org.dspace.app.util.service.DSpaceObjectUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import 
org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.model.AccessConditionOption; +import org.dspace.utils.DSpace; + +/** + * Implementation of {@link DSpaceRunnable} to perform a bulk access control via json file. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControl extends DSpaceRunnable> { + + private DSpaceObjectUtils dSpaceObjectUtils; + + private SearchService searchService; + + private ItemService itemService; + + private String filename; + + private List uuids; + + private Context context; + + private BulkAccessConditionConfigurationService bulkAccessConditionConfigurationService; + + private ResourcePolicyService resourcePolicyService; + + protected EPersonService epersonService; + + private ConfigurationService configurationService; + + private MediaFilterService mediaFilterService; + + private Map itemAccessConditions; + + private Map uploadAccessConditions; + + private final String ADD_MODE = "add"; + + private final String REPLACE_MODE = "replace"; + + private boolean help = false; + + protected String eperson = null; + + @Override + @SuppressWarnings("unchecked") + public void setup() throws ParseException { + + this.searchService = SearchUtils.getSearchService(); + this.itemService = ContentServiceFactory.getInstance().getItemService(); + this.resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + this.epersonService = EPersonServiceFactory.getInstance().getEPersonService(); + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService(); + mediaFilterService.setLogHandler(handler); + this.bulkAccessConditionConfigurationService = new DSpace().getServiceManager().getServiceByName( + "bulkAccessConditionConfigurationService", BulkAccessConditionConfigurationService.class); + this.dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName( + DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class); + + BulkAccessConditionConfiguration bulkAccessConditionConfiguration = + bulkAccessConditionConfigurationService.getBulkAccessConditionConfiguration("default"); + + itemAccessConditions = bulkAccessConditionConfiguration + .getItemAccessConditionOptions() + .stream() + .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity())); + + uploadAccessConditions = bulkAccessConditionConfiguration + .getBitstreamAccessConditionOptions() + .stream() + .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity())); + + help = commandLine.hasOption('h'); + filename = commandLine.getOptionValue('f'); + uuids = commandLine.hasOption('u') ? 
Arrays.asList(commandLine.getOptionValues('u')) : null; + } + + @Override + public void internalRun() throws Exception { + + if (help) { + printHelp(); + return; + } + + ObjectMapper mapper = new ObjectMapper(); + mapper.setTimeZone(TimeZone.getTimeZone("UTC")); + BulkAccessControlInput accessControl; + context = new Context(Context.Mode.BATCH_EDIT); + setEPerson(context); + + if (!isAuthorized(context)) { + handler.logError("Current user is not eligible to execute script bulk-access-control"); + throw new AuthorizeException("Current user is not eligible to execute script bulk-access-control"); + } + + if (uuids == null || uuids.size() == 0) { + handler.logError("A target uuid must be provided with at least on uuid (run with -h flag for details)"); + throw new IllegalArgumentException("At least one target uuid must be provided"); + } + + InputStream inputStream = handler.getFileStream(context, filename) + .orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be " + + "found for filename: " + filename)); + + try { + accessControl = mapper.readValue(inputStream, BulkAccessControlInput.class); + } catch (IOException e) { + handler.logError("Error parsing json file " + e.getMessage()); + throw new IllegalArgumentException("Error parsing json file", e); + } + try { + validate(accessControl); + updateItemsAndBitstreamsPolices(accessControl); + context.complete(); + } catch (Exception e) { + handler.handleException(e); + context.abort(); + } + } + + /** + * check the validation of mapped json data, it must + * provide item or bitstream information or both of them + * and check the validation of item node if provided, + * and check the validation of bitstream node if provided. + * + * @param accessControl mapped json data + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if accessControl is invalid + */ + private void validate(BulkAccessControlInput accessControl) throws SQLException { + + AccessConditionItem item = accessControl.getItem(); + AccessConditionBitstream bitstream = accessControl.getBitstream(); + + if (Objects.isNull(item) && Objects.isNull(bitstream)) { + handler.logError("item or bitstream node must be provided"); + throw new BulkAccessControlException("item or bitstream node must be provided"); + } + + if (Objects.nonNull(item)) { + validateItemNode(item); + } + + if (Objects.nonNull(bitstream)) { + validateBitstreamNode(bitstream); + } + } + + /** + * check the validation of item node, the item mode + * must be provided with value 'add' or 'replace' + * if mode equals to add so the information + * of accessCondition must be provided, + * also checking that accessConditions information are valid. 
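+     *
+     * Illustrative sketch only: assuming the default Jackson field names of
+     * {@link AccessConditionItem} and a hypothetical access condition named "openaccess",
+     * a valid item node in the parsed json could look like
+     * <pre>{@code
+     * "item": { "mode": "replace", "accessConditions": [ { "name": "openaccess" } ] }
+     * }</pre>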
+ * + * @param item the item node + * @throws BulkAccessControlException if item node is invalid + */ + private void validateItemNode(AccessConditionItem item) { + String mode = item.getMode(); + List accessConditions = item.getAccessConditions(); + + if (StringUtils.isEmpty(mode)) { + handler.logError("item mode node must be provided"); + throw new BulkAccessControlException("item mode node must be provided"); + } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) { + handler.logError("wrong value for item mode<" + mode + ">"); + throw new BulkAccessControlException("wrong value for item mode<" + mode + ">"); + } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logError("accessConditions of item must be provided with mode<" + ADD_MODE + ">"); + throw new BulkAccessControlException( + "accessConditions of item must be provided with mode<" + ADD_MODE + ">"); + } + + for (AccessCondition accessCondition : accessConditions) { + validateAccessCondition(accessCondition); + } + } + + /** + * check the validation of bitstream node, the bitstream mode + * must be provided with value 'add' or 'replace' + * if mode equals to add so the information of accessConditions + * must be provided, + * also checking that constraint information is valid, + * also checking that accessConditions information are valid. + * + * @param bitstream the bitstream node + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if bitstream node is invalid + */ + private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQLException { + String mode = bitstream.getMode(); + List accessConditions = bitstream.getAccessConditions(); + + if (StringUtils.isEmpty(mode)) { + handler.logError("bitstream mode node must be provided"); + throw new BulkAccessControlException("bitstream mode node must be provided"); + } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) { + handler.logError("wrong value for bitstream mode<" + mode + ">"); + throw new BulkAccessControlException("wrong value for bitstream mode<" + mode + ">"); + } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logError("accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); + throw new BulkAccessControlException( + "accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); + } + + validateConstraint(bitstream); + + for (AccessCondition accessCondition : bitstream.getAccessConditions()) { + validateAccessCondition(accessCondition); + } + } + + /** + * check the validation of constraint node if provided, + * constraint isn't supported when multiple uuids are provided + * or when uuid isn't an Item + * + * @param bitstream the bitstream node + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if constraint node is invalid + */ + private void validateConstraint(AccessConditionBitstream bitstream) throws SQLException { + if (uuids.size() > 1 && containsConstraints(bitstream)) { + handler.logError("constraint isn't supported when multiple uuids are provided"); + throw new BulkAccessControlException("constraint isn't supported when multiple uuids are provided"); + } else if (uuids.size() == 1 && containsConstraints(bitstream)) { + DSpaceObject dso = + dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(0))); + + if (Objects.nonNull(dso) && dso.getType() != Constants.ITEM) { + handler.logError("constraint is not supported when 
uuid isn't an Item"); + throw new BulkAccessControlException("constraint is not supported when uuid isn't an Item"); + } + } + } + + /** + * check the validation of access condition, + * the access condition name must equal to one of configured access conditions, + * then call {@link AccessConditionOption#validateResourcePolicy( + * Context, String, Date, Date)} if exception happens so, it's invalid. + * + * @param accessCondition the accessCondition + * @throws BulkAccessControlException if the accessCondition is invalid + */ + private void validateAccessCondition(AccessCondition accessCondition) { + + if (!itemAccessConditions.containsKey(accessCondition.getName())) { + handler.logError("wrong access condition <" + accessCondition.getName() + ">"); + throw new BulkAccessControlException("wrong access condition <" + accessCondition.getName() + ">"); + } + + try { + itemAccessConditions.get(accessCondition.getName()).validateResourcePolicy( + context, accessCondition.getName(), accessCondition.getStartDate(), accessCondition.getEndDate()); + } catch (Exception e) { + handler.logError("invalid access condition, " + e.getMessage()); + handler.handleException(e); + } + } + + /** + * find all items of provided {@link #uuids} from solr, + * then update the resource policies of items + * or bitstreams of items (only bitstreams of ORIGINAL bundles) + * and derivative bitstreams, or both of them. + * + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws SearchServiceException if a search error occurs + * @throws AuthorizeException if an authorization error occurs + */ + private void updateItemsAndBitstreamsPolices(BulkAccessControlInput accessControl) + throws SQLException, SearchServiceException, AuthorizeException { + + int counter = 0; + int start = 0; + int limit = 20; + + String query = buildSolrQuery(uuids); + + Iterator itemIterator = findItems(query, start, limit); + + while (itemIterator.hasNext()) { + + Item item = context.reloadEntity(itemIterator.next()); + + if (Objects.nonNull(accessControl.getItem())) { + updateItemPolicies(item, accessControl); + } + + if (Objects.nonNull(accessControl.getBitstream())) { + updateBitstreamsPolicies(item, accessControl); + } + + context.commit(); + context.uncacheEntity(item); + counter++; + + if (counter == limit) { + counter = 0; + start += limit; + itemIterator = findItems(query, start, limit); + } + } + } + + private String buildSolrQuery(List uuids) throws SQLException { + String [] query = new String[uuids.size()]; + + for (int i = 0 ; i < query.length ; i++) { + DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(i))); + + if (dso.getType() == Constants.COMMUNITY) { + query[i] = "location.comm:" + dso.getID(); + } else if (dso.getType() == Constants.COLLECTION) { + query[i] = "location.coll:" + dso.getID(); + } else if (dso.getType() == Constants.ITEM) { + query[i] = "search.resourceid:" + dso.getID(); + } + } + return StringUtils.joinWith(" OR ", query); + } + + private Iterator findItems(String query, int start, int limit) + throws SearchServiceException { + + DiscoverQuery discoverQuery = buildDiscoveryQuery(query, start, limit); + + return searchService.search(context, discoverQuery) + .getIndexableObjects() + .stream() + .map(indexableObject -> + ((IndexableItem) indexableObject).getIndexedObject()) + .collect(Collectors.toList()) + .iterator(); + } + + private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) 
{ + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.setQuery(query); + discoverQuery.setStart(start); + discoverQuery.setMaxResults(limit); + + return discoverQuery; + } + + /** + * update the item resource policies, + * when mode equals to 'replace' will remove + * all current resource polices of types 'TYPE_CUSTOM' + * and 'TYPE_INHERITED' then, set the new resource policies. + * + * @param item the item + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ + private void updateItemPolicies(Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + AccessConditionItem acItem = accessControl.getItem(); + + if (REPLACE_MODE.equals(acItem.getMode())) { + removeReadPolicies(item, TYPE_CUSTOM); + removeReadPolicies(item, TYPE_INHERITED); + } + + setItemPolicies(item, accessControl); + logInfo(acItem.getAccessConditions(), acItem.getMode(), item); + } + + /** + * create the new resource policies of item. + * then, call {@link ItemService#adjustItemPolicies( + * Context, Item, Collection)} to adjust item's default policies. + * + * @param item the item + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ + private void setItemPolicies(Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + accessControl + .getItem() + .getAccessConditions() + .forEach(accessCondition -> createResourcePolicy(item, accessCondition, + itemAccessConditions.get(accessCondition.getName()))); + + itemService.adjustItemPolicies(context, item, item.getOwningCollection()); + } + + /** + * update the resource policies of all item's bitstreams + * or bitstreams specified into constraint node, + * and derivative bitstreams. + * + * NOTE: only bitstreams of ORIGINAL bundles + * + * @param item the item contains bitstreams + * @param accessControl the access control input + */ + private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessControl) { + AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints(); + + // look over all the bundles and force initialization of bitstreams collection + // to avoid lazy initialization exception + long count = item.getBundles() + .stream() + .flatMap(bundle -> + bundle.getBitstreams().stream()) + .count(); + + item.getBundles(CONTENT_BUNDLE_NAME).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .filter(bitstream -> constraints == null || + constraints.getUuid() == null || + constraints.getUuid().size() == 0 || + constraints.getUuid().contains(bitstream.getID().toString())) + .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); + } + + /** + * check that the bitstream node is existed, + * and contains constraint node, + * and constraint contains uuids. 
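+     *
+     * For illustration only (field names assume the default Jackson mapping of
+     * {@link AccessConditionBitstream.Constraint}; mode and accessConditions omitted),
+     * a bitstream node that satisfies this check could look like
+     * <pre>{@code
+     * "bitstream": { "constraints": { "uuid": [ "<uuid-of-a-bitstream>" ] } }
+     * }</pre>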
+ * + * @param bitstream the bitstream node + * @return true when uuids of constraint of bitstream is not empty, + * otherwise false + */ + private boolean containsConstraints(AccessConditionBitstream bitstream) { + return Objects.nonNull(bitstream) && + Objects.nonNull(bitstream.getConstraints()) && + isNotEmpty(bitstream.getConstraints().getUuid()); + } + + /** + * update the bitstream resource policies, + * when mode equals to replace will remove + * all current resource polices of types 'TYPE_CUSTOM' + * and 'TYPE_INHERITED' then, set the new resource policies. + * + * @param bitstream the bitstream + * @param item the item of bitstream + * @param accessControl the access control input + * @throws RuntimeException if something goes wrong in the database + * or an authorization error occurs + */ + private void updateBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) { + + AccessConditionBitstream acBitstream = accessControl.getBitstream(); + + if (REPLACE_MODE.equals(acBitstream.getMode())) { + removeReadPolicies(bitstream, TYPE_CUSTOM); + removeReadPolicies(bitstream, TYPE_INHERITED); + } + + try { + setBitstreamPolicies(bitstream, item, accessControl); + logInfo(acBitstream.getAccessConditions(), acBitstream.getMode(), bitstream); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + + } + + /** + * remove dspace object's read policies. + * + * @param dso the dspace object + * @param type resource policy type + * @throws BulkAccessControlException if something goes wrong + * in the database or an authorization error occurs + */ + private void removeReadPolicies(DSpaceObject dso, String type) { + try { + resourcePolicyService.removePolicies(context, dso, type, Constants.READ); + } catch (SQLException | AuthorizeException e) { + throw new BulkAccessControlException(e); + } + } + + /** + * create the new resource policies of bitstream. + * then, call {@link ItemService#adjustItemPolicies( + * Context, Item, Collection)} to adjust bitstream's default policies. + * and also update the resource policies of its derivative bitstreams. + * + * @param bitstream the bitstream + * @param item the item of bitstream + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ + private void setBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + accessControl.getBitstream() + .getAccessConditions() + .forEach(accessCondition -> createResourcePolicy(bitstream, accessCondition, + uploadAccessConditions.get(accessCondition.getName()))); + + itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream); + mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, bitstream); + } + + /** + * create the resource policy from the information + * comes from the access condition. 
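+     *
+     * Only the name, description, start date and end date of the access condition are used
+     * here; a hypothetical fragment for a single access condition (field names and date
+     * format assume the default Jackson mapping) could be
+     * <pre>{@code
+     * { "name": "embargo", "startDate": "2024-06-01" }
+     * }</pre>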
+ * + * @param obj the dspace object + * @param accessCondition the access condition + * @param accessConditionOption the access condition option + * @throws BulkAccessControlException if an exception occurs + */ + private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition, + AccessConditionOption accessConditionOption) { + + String name = accessCondition.getName(); + String description = accessCondition.getDescription(); + Date startDate = accessCondition.getStartDate(); + Date endDate = accessCondition.getEndDate(); + + try { + accessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate); + } catch (Exception e) { + throw new BulkAccessControlException(e); + } + } + + /** + * Set the eperson in the context + * + * @param context the context + * @throws SQLException if database error + */ + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier()); + + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier()); + throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier()); + } + + context.setCurrentUser(myEPerson); + } + + private void logInfo(List accessConditions, String mode, DSpaceObject dso) { + String type = dso.getClass().getSimpleName(); + + if (REPLACE_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logInfo("Cleaning " + type + " {" + dso.getID() + "} policies"); + handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}"); + return; + } + + StringBuilder message = new StringBuilder(); + message.append(mode.equals(ADD_MODE) ? "Adding " : "Replacing ") + .append(type) + .append(" {") + .append(dso.getID()) + .append("} policy") + .append(mode.equals(ADD_MODE) ? 
" with " : " to ") + .append("access conditions:"); + + AppendAccessConditionsInfo(message, accessConditions); + + handler.logInfo(message.toString()); + + if (REPLACE_MODE.equals(mode) && isAppendModeEnabled()) { + handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}"); + } + } + + private void AppendAccessConditionsInfo(StringBuilder message, List accessConditions) { + DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + message.append("{"); + + for (int i = 0; i < accessConditions.size(); i++) { + message.append(accessConditions.get(i).getName()); + + Optional.ofNullable(accessConditions.get(i).getStartDate()) + .ifPresent(date -> message.append(", start_date=" + dateFormat.format(date))); + + Optional.ofNullable(accessConditions.get(i).getEndDate()) + .ifPresent(date -> message.append(", end_date=" + dateFormat.format(date))); + + if (i != accessConditions.size() - 1) { + message.append(", "); + } + } + + message.append("}"); + } + + private boolean isAppendModeEnabled() { + return configurationService.getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode"); + } + + protected boolean isAuthorized(Context context) { + return true; + } + + @Override + @SuppressWarnings("unchecked") + public BulkAccessControlScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("bulk-access-control", BulkAccessControlScriptConfiguration.class); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java new file mode 100644 index 000000000000..4e8cfe480eeb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.sql.SQLException; +import java.util.Arrays; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.scripts.DSpaceCommandLineParameter; + +/** + * Extension of {@link BulkAccessControl} for CLI. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlCli extends BulkAccessControl { + + @Override + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson; + eperson = commandLine.getOptionValue('e'); + + if (eperson == null) { + handler.logError("An eperson to do the the Bulk Access Control must be specified " + + "(run with -h flag for details)"); + throw new UnsupportedOperationException("An eperson to do the Bulk Access Control must be specified"); + } + + if (StringUtils.contains(eperson, '@')) { + myEPerson = epersonService.findByEmail(context, eperson); + } else { + myEPerson = epersonService.find(context, UUID.fromString(eperson)); + } + + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)"); + throw new UnsupportedOperationException("EPerson cannot be found: " + eperson); + } + + context.setCurrentUser(myEPerson); + } + + @Override + protected boolean isAuthorized(Context context) { + + if (context.getCurrentUser() == null) { + return false; + } + + return getScriptConfiguration().isAllowedToExecute(context, + Arrays.stream(commandLine.getOptions()) + .map(option -> + new DSpaceCommandLineParameter("-" + option.getOpt(), option.getValue())) + .collect(Collectors.toList())); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java new file mode 100644 index 000000000000..951c93db3030 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.io.InputStream; + +import org.apache.commons.cli.Options; + +/** + * Extension of {@link BulkAccessControlScriptConfiguration} for CLI. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlCliScriptConfiguration + extends BulkAccessControlScriptConfiguration { + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption("u", "uuid", true, "target uuids of communities/collections/items"); + options.getOption("u").setType(String.class); + options.getOption("u").setRequired(true); + + options.addOption("f", "file", true, "source json file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(true); + + options.addOption("e", "eperson", true, "email of EPerson used to perform actions"); + options.getOption("e").setRequired(true); + + options.addOption("h", "help", false, "help"); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java new file mode 100644 index 000000000000..5196247f94cb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java @@ -0,0 +1,110 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.io.InputStream; +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.cli.Options; +import org.dspace.app.util.DSpaceObjectUtilsImpl; +import org.dspace.app.util.service.DSpaceObjectUtils; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.utils.DSpace; + +/** + * Script configuration for {@link BulkAccessControl}. 
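// Editorial sketch, not part of this patch: exercising the CLI options defined above with
// commons-cli. The option letters (-u, -f, -e) come from the configuration; the argument
// values are placeholders.
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.dspace.app.bulkaccesscontrol.BulkAccessControlCliScriptConfiguration;

class BulkAccessControlCliOptionsSketch {
    public static void main(String[] args) throws Exception {
        Options options = new BulkAccessControlCliScriptConfiguration().getOptions();
        CommandLine line = new DefaultParser().parse(options, new String[] {
                "-u", "123e4567-e89b-12d3-a456-426614174000",
                "-f", "access-conditions.json",
                "-e", "admin@example.org"});
        System.out.println("targets: " + String.join(", ", line.getOptionValues('u')));
        System.out.println("eperson: " + line.getOptionValue('e'));
    }
}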
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + * @param the {@link BulkAccessControl} type + */ +public class BulkAccessControlScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + + try { + if (Objects.isNull(commandLineParameters)) { + return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) + || authorizeService.isItemAdmin(context); + } else { + List dspaceObjectIDs = + commandLineParameters.stream() + .filter(parameter -> "-u".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .collect(Collectors.toList()); + + DSpaceObjectUtils dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName( + DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class); + + for (String dspaceObjectID : dspaceObjectIDs) { + + DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(dspaceObjectID)); + + if (Objects.isNull(dso)) { + throw new IllegalArgumentException(); + } + + if (!authorizeService.isAdmin(context, dso)) { + return false; + } + } + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + + return true; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("u", "uuid", true, "target uuids of communities/collections/items"); + options.getOption("u").setType(String.class); + options.getOption("u").setRequired(true); + + options.addOption("f", "file", true, "source json file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(true); + + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this + * BulkImportScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java new file mode 100644 index 000000000000..092611eb0654 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.exception; + +/** + * Exception for errors that occurs during the bulk access control + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlException extends RuntimeException { + + private static final long serialVersionUID = -74730626862418515L; + + /** + * Constructor with error message and cause. + * + * @param message the error message + * @param cause the error cause + */ + public BulkAccessControlException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructor with error message. 
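// Editorial sketch, not part of this patch: asking the script configuration above whether
// the current user may run it against a specific target. The bean name
// "bulk-access-control" matches getScriptConfiguration() earlier in this patch; the uuid
// is a placeholder and would need to identify a real community, collection or item.
import java.util.List;

import org.dspace.app.bulkaccesscontrol.BulkAccessControlScriptConfiguration;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.utils.DSpace;

class BulkAccessControlAuthorizationSketch {

    boolean mayRun(Context context) {
        BulkAccessControlScriptConfiguration configuration = new DSpace().getServiceManager()
                .getServiceByName("bulk-access-control", BulkAccessControlScriptConfiguration.class);
        List<DSpaceCommandLineParameter> parameters = List.of(
                new DSpaceCommandLineParameter("-u", "123e4567-e89b-12d3-a456-426614174000"));
        return configuration.isAllowedToExecute(context, parameters);
    }
}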
+ * + * @param message the error message + */ + public BulkAccessControlException(String message) { + super(message); + } + + /** + * Constructor with error cause. + * + * @param cause the error cause + */ + public BulkAccessControlException(Throwable cause) { + super(cause); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java new file mode 100644 index 000000000000..6cf95e0e2179 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.Date; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; +import org.dspace.util.MultiFormatDateDeserializer; + +/** + * Class that model the values of an Access Condition as expressed in the {@link BulkAccessControl} input file + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessCondition { + + private String name; + + private String description; + + @JsonDeserialize(using = MultiFormatDateDeserializer.class) + private Date startDate; + + @JsonDeserialize(using = MultiFormatDateDeserializer.class) + private Date endDate; + + public AccessCondition() { + } + + public AccessCondition(String name, String description, Date startDate, Date endDate) { + this.name = name; + this.description = description; + this.startDate = startDate; + this.endDate = endDate; + } + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + public Date getStartDate() { + return startDate; + } + + public Date getEndDate() { + return endDate; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java new file mode 100644 index 000000000000..2176e24d7f9d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java @@ -0,0 +1,69 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + +/** + * Class that model the value of bitstream node + * from json file of the {@link BulkAccessControl} + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessConditionBitstream { + + private String mode; + + private Constraint constraints; + + private List accessConditions; + + public String getMode() { + return mode; + } + + public void setMode(String mode) { + this.mode = mode; + } + + public Constraint getConstraints() { + return constraints; + } + + public void setConstraints(Constraint constraints) { + this.constraints = constraints; + } + + public List getAccessConditions() { + if (accessConditions == null) { + return new ArrayList<>(); + } + return 
accessConditions; + } + + public void setAccessConditions(List accessConditions) { + this.accessConditions = accessConditions; + } + + public class Constraint { + + private List uuid; + + public List getUuid() { + return uuid; + } + + public void setUuid(List uuid) { + this.uuid = uuid; + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java new file mode 100644 index 000000000000..c482dfc34d65 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + +/** + * Class that model the value of item node + * from json file of the {@link BulkAccessControl} + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessConditionItem { + + String mode; + + List accessConditions; + + public String getMode() { + return mode; + } + + public void setMode(String mode) { + this.mode = mode; + } + + public List getAccessConditions() { + if (accessConditions == null) { + return new ArrayList<>(); + } + return accessConditions; + } + + public void setAccessConditions(List accessConditions) { + this.accessConditions = accessConditions; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java new file mode 100644 index 000000000000..a2ebbe5a12d4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.List; + +import org.dspace.submit.model.AccessConditionOption; + +/** + * A collection of conditions to be met when bulk access condition. 
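// Editorial sketch, not part of this patch: building the same structure as the JSON input
// file (illustrated further below for BulkAccessControlInput) directly against the model
// classes above. All values are placeholders.
import java.util.Date;
import java.util.List;

import org.dspace.app.bulkaccesscontrol.model.AccessCondition;
import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream;
import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput;

class BulkAccessControlInputBuilderSketch {

    BulkAccessControlInput build() {
        // replace the item's custom/inherited READ policies with "openaccess"
        AccessConditionItem item = new AccessConditionItem();
        item.setMode("replace");
        item.setAccessConditions(List.of(new AccessCondition("openaccess", null, null, null)));

        // add an embargo starting today to the bitstreams (no uuid constraint: all ORIGINAL bitstreams)
        AccessConditionBitstream bitstream = new AccessConditionBitstream();
        bitstream.setMode("add");
        bitstream.setAccessConditions(List.of(
                new AccessCondition("embargo", null, new Date(), null)));

        return new BulkAccessControlInput(item, bitstream);
    }
}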
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessConditionConfiguration { + + private String name; + private List itemAccessConditionOptions; + private List bitstreamAccessConditionOptions; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public List getItemAccessConditionOptions() { + return itemAccessConditionOptions; + } + + public void setItemAccessConditionOptions( + List itemAccessConditionOptions) { + this.itemAccessConditionOptions = itemAccessConditionOptions; + } + + public List getBitstreamAccessConditionOptions() { + return bitstreamAccessConditionOptions; + } + + public void setBitstreamAccessConditionOptions( + List bitstreamAccessConditionOptions) { + this.bitstreamAccessConditionOptions = bitstreamAccessConditionOptions; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java new file mode 100644 index 000000000000..0f8852a71f7d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java @@ -0,0 +1,72 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + +/** + * Class that model the content of the JSON file used as input for the {@link BulkAccessControl} + * + *
+ * {
+ *   "item": {
+ *     "mode": "replace",
+ *     "accessConditions": [
+ *       {
+ *         "name": "openaccess"
+ *       }
+ *     ]
+ *   },
+ *   "bitstream": {
+ *     "constraints": {
+ *       "uuid": ["bit-uuid1", "bit-uuid2", ..., "bit-uuidN"]
+ *     },
+ *     "mode": "add",
+ *     "accessConditions": [
+ *       {
+ *         "name": "embargo",
+ *         "startDate": "2024-06-24T23:59:59.999+0000"
+ *       }
+ *     ]
+ *   }
+ * }
+ *
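// Editorial sketch, not part of this patch: reading a file with the shape illustrated above
// into BulkAccessControlInput. Jackson's ObjectMapper is assumed here (consistent with the
// @JsonDeserialize annotation on AccessCondition); BulkAccessControl itself may load the
// file differently. The file name is a placeholder.
import java.io.File;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput;

class BulkAccessControlInputParseSketch {
    public static void main(String[] args) throws Exception {
        BulkAccessControlInput input = new ObjectMapper()
                .readValue(new File("access-conditions.json"), BulkAccessControlInput.class);
        System.out.println("item mode: " + input.getItem().getMode());
        System.out.println("bitstream mode: " + input.getBitstream().getMode());
    }
}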
    + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessControlInput { + + AccessConditionItem item; + + AccessConditionBitstream bitstream; + + public BulkAccessControlInput() { + } + + public BulkAccessControlInput(AccessConditionItem item, + AccessConditionBitstream bitstream) { + this.item = item; + this.bitstream = bitstream; + } + + public AccessConditionItem getItem() { + return item; + } + + public void setItem(AccessConditionItem item) { + this.item = item; + } + + public AccessConditionBitstream getBitstream() { + return bitstream; + } + + public void setBitstream(AccessConditionBitstream bitstream) { + this.bitstream = bitstream; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java new file mode 100644 index 000000000000..321b6d928e92 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.service; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.collections4.CollectionUtils; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Simple bean to manage different Bulk Access Condition configurations + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessConditionConfigurationService { + + @Autowired + private List bulkAccessConditionConfigurations; + + public List getBulkAccessConditionConfigurations() { + if (CollectionUtils.isEmpty(bulkAccessConditionConfigurations)) { + return new ArrayList<>(); + } + return bulkAccessConditionConfigurations; + } + + public BulkAccessConditionConfiguration getBulkAccessConditionConfiguration(String name) { + return getBulkAccessConditionConfigurations().stream() + .filter(x -> name.equals(x.getName())) + .findFirst() + .orElse(null); + } + + public void setBulkAccessConditionConfigurations( + List bulkAccessConditionConfigurations) { + this.bulkAccessConditionConfigurations = bulkAccessConditionConfigurations; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java index 9ccd53944a24..fb228e7041b8 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script. 
*/ public class MetadataDeletionScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java index 31556afc8d3d..aa76c09c0a5b 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataExport} script */ public class MetadataExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java index 65994040badc..ce2f7fb68af1 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java @@ -8,22 +8,15 @@ package org.dspace.app.bulkedit; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataImport} script */ public class MetadataImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -40,15 +33,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git 
a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java index 982973e47c50..ff83c3ecb225 100644 --- a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java @@ -7,18 +7,11 @@ */ package org.dspace.app.harvest; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class HarvestScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; private Class dspaceRunnableClass; @@ -32,13 +25,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java index cf70120d27d3..b37df5f5ea59 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java @@ -7,14 +7,9 @@ */ package org.dspace.app.itemexport; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemExport} script @@ -23,9 +18,6 @@ */ public class ItemExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index 6870b94eee1d..b32de11f7a7f 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -11,6 +11,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.net.URL; import java.nio.file.Files; import java.sql.SQLException; import java.util.ArrayList; @@ -22,6 +23,7 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.io.FileUtils; import 
org.apache.commons.lang3.StringUtils; +import org.apache.tika.Tika; import org.dspace.app.itemimport.factory.ItemImportServiceFactory; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.authorize.AuthorizeException; @@ -74,10 +76,12 @@ public class ItemImport extends DSpaceRunnable { protected boolean isQuiet = false; protected boolean commandLineCollections = false; protected boolean zip = false; + protected boolean remoteUrl = false; protected String zipfilename = null; + protected boolean zipvalid = false; protected boolean help = false; protected File workDir = null; - private File workFile = null; + protected File workFile = null; protected static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); @@ -233,10 +237,21 @@ public void internalRun() throws Exception { handler.logInfo("***End of Test Run***"); } } finally { - // clean work dir if (zip) { - FileUtils.deleteDirectory(new File(sourcedir)); - FileUtils.deleteDirectory(workDir); + // if zip file was valid then clean sourcedir + if (zipvalid && sourcedir != null && new File(sourcedir).exists()) { + FileUtils.deleteDirectory(new File(sourcedir)); + } + + // clean workdir + if (workDir != null && workDir.exists()) { + FileUtils.deleteDirectory(workDir); + } + + // conditionally clean workFile if import was done in the UI or via a URL and it still exists + if (workFile != null && workFile.exists()) { + workFile.delete(); + } } Date endTime = new Date(); @@ -253,6 +268,17 @@ public void internalRun() throws Exception { * @param context */ protected void validate(Context context) { + // check zip type: uploaded file or remote url + if (commandLine.hasOption('z')) { + zipfilename = commandLine.getOptionValue('z'); + } else if (commandLine.hasOption('u')) { + remoteUrl = true; + zipfilename = commandLine.getOptionValue('u'); + } + if (StringUtils.isBlank(zipfilename)) { + throw new UnsupportedOperationException("Must run with either name of zip file or url of zip file"); + } + if (command == null) { handler.logError("Must run with either add, replace, or remove (run with -h flag for details)"); throw new UnsupportedOperationException("Must run with either add, replace, or remove"); @@ -295,7 +321,6 @@ protected void process(Context context, ItemImportService itemImportService, handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE); } finally { mapFile.delete(); - workFile.delete(); } } @@ -306,17 +331,55 @@ protected void process(Context context, ItemImportService itemImportService, * @throws Exception */ protected void readZip(Context context, ItemImportService itemImportService) throws Exception { - Optional optionalFileStream = handler.getFileStream(context, zipfilename); - if (optionalFileStream.isPresent()) { + Optional optionalFileStream = Optional.empty(); + Optional validationFileStream = Optional.empty(); + if (!remoteUrl) { + // manage zip via upload + optionalFileStream = handler.getFileStream(context, zipfilename); + validationFileStream = handler.getFileStream(context, zipfilename); + } else { + // manage zip via remote url + optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + } + + if (validationFileStream.isPresent()) { + // validate zip file + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + workFile = new File(itemImportService.getTempWorkDir() + 
File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); - sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } else { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); + } + + /** + * Confirm that the zip file has the correct MIME type + * @param inputStream + */ + protected void validateZip(InputStream inputStream) { + Tika tika = new Tika(); + try { + String mimeType = tika.detect(inputStream); + if (mimeType.equals("application/zip")) { + zipvalid = true; + } else { + handler.logError("A valid zip file must be supplied. The provided file has mimetype: " + mimeType); + throw new UnsupportedOperationException("A valid zip file must be supplied"); + } + } catch (IOException e) { + throw new IllegalArgumentException( + "There was an error while reading the zip file: " + zipfilename); + } } /** @@ -356,7 +419,6 @@ protected void setMapFile() throws IOException { */ protected void setZip() { zip = true; - zipfilename = commandLine.getOptionValue('z'); } /** diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java index 35de7b443a97..98d2469b7155 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -8,10 +8,15 @@ package org.dspace.app.itemimport; import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.net.URL; import java.sql.SQLException; import java.util.List; +import java.util.Optional; import java.util.UUID; +import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.content.Collection; @@ -62,7 +67,7 @@ protected void validate(Context context) { handler.logError("Must run with either add, replace, or remove (run with -h flag for details)"); throw new UnsupportedOperationException("Must run with either add, replace, or remove"); } else if ("add".equals(command) || "replace".equals(command)) { - if (sourcedir == null) { + if (!remoteUrl && sourcedir == null) { handler.logError("A source directory containing items must be set (run with -h flag for details)"); throw new UnsupportedOperationException("A source directory containing items must be set"); } @@ -96,10 +101,43 @@ protected void process(Context context, ItemImportService itemImportService, protected void readZip(Context context, ItemImportService itemImportService) throws Exception { // If this is a zip archive, unzip it first if (zip) { - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR - + File.separator + context.getCurrentUser().getID()); - sourcedir = itemImportService.unzip( - new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath()); + if (!remoteUrl) { + // confirm zip file exists + File myZipFile = new File(sourcedir + File.separator + zipfilename); + if ((!myZipFile.exists()) || (!myZipFile.isFile())) { + throw new IllegalArgumentException( + "Error 
reading file, the file couldn't be found for filename: " + zipfilename); + } + + // validate zip file + InputStream validationFileStream = new FileInputStream(myZipFile); + validateZip(validationFileStream); + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip( + new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath()); + } else { + // manage zip via remote url + Optional optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (optionalFileStream.isPresent()) { + // validate zip file via url + Optional validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + + workFile = new File(itemImportService.getTempWorkDir() + File.separator + + zipfilename + "-" + context.getCurrentUser().getID()); + FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); + } else { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + } } } @@ -120,6 +158,12 @@ protected void setZip() { zip = true; zipfilename = commandLine.getOptionValue('z'); } + + if (commandLine.hasOption('u')) { // remote url + zip = true; + remoteUrl = true; + zipfilename = commandLine.getOptionValue('u'); + } } @Override diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java index d265cbf4a1d6..89abd7155b39 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java @@ -37,6 +37,9 @@ public Options getOptions() { options.addOption(Option.builder("z").longOpt("zip") .desc("name of zip file") .hasArg().required(false).build()); + options.addOption(Option.builder("u").longOpt("url") + .desc("url of zip file") + .hasArg().build()); options.addOption(Option.builder("c").longOpt("collection") .desc("destination collection(s) Handle or database ID") .hasArg().required(false).build()); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java index a3149040c49b..3f2675ea58f1 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -8,14 +8,10 @@ package org.dspace.app.itemimport; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemImport} script @@ -24,9 +20,6 @@ */ public class ItemImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; 
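// Editorial sketch, not part of this patch: the kind of MIME-type check performed by the
// validateZip() methods added to ItemImport/ItemImportCLI above, shown standalone with
// Apache Tika. The file path is a placeholder.
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.tika.Tika;

class ZipMimeTypeCheckSketch {
    public static void main(String[] args) throws IOException {
        try (InputStream in = Files.newInputStream(Path.of("import-package.zip"))) {
            String mimeType = new Tika().detect(in);
            if (!"application/zip".equals(mimeType)) {
                throw new IllegalArgumentException("Expected a zip file but detected: " + mimeType);
            }
            System.out.println("Zip archive detected, safe to unpack");
        }
    }
}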
- private Class dspaceRunnableClass; @Override @@ -39,15 +32,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); @@ -64,7 +48,10 @@ public Options getOptions() { options.addOption(Option.builder("z").longOpt("zip") .desc("name of zip file") .type(InputStream.class) - .hasArg().required().build()); + .hasArg().build()); + options.addOption(Option.builder("u").longOpt("url") + .desc("url of zip file") + .hasArg().build()); options.addOption(Option.builder("c").longOpt("collection") .desc("destination collection(s) Handle or database ID") .hasArg().required(false).build()); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java index 076cc8ebe20e..4148232cf3ba 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java @@ -952,9 +952,10 @@ protected void addDCValue(Context c, Item i, String schema, Node n) String qualifier = getAttributeValue(n, "qualifier"); //NodeValue(); // //getElementData(n, // "qualifier"); - String language = getAttributeValue(n, "language"); - if (language != null) { - language = language.trim(); + + String language = null; + if (StringUtils.isNotBlank(getAttributeValue(n, "language"))) { + language = getAttributeValue(n, "language").trim(); } if (!isQuiet) { diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java index 467303c3cafd..afe1bb3d75df 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java @@ -22,7 +22,9 @@ public InputStream getDestinationStream(Item currentItem, InputStream source, bo File f2 = null; File f3 = null; try { - f2 = getImageFile(f, 0, verbose); + // Step 1: get an image from our PDF file, with PDF-specific processing options + f2 = getImageFile(f, verbose); + // Step 2: use the image above to create the final resized and rotated thumbnail f3 = getThumbnailFile(f2, verbose); byte[] bytes = Files.readAllBytes(f3.toPath()); return new ByteArrayInputStream(bytes); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index d16243e3e3bc..408982d157e5 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -116,9 +116,17 @@ public File getThumbnailFile(File f, boolean verbose) return f2; } - public File getImageFile(File f, int page, boolean verbose) + /** + * Return an image from a bitstream with specific processing options for + * PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to + * generate an intermediate image file for use with getThumbnailFile. 
+ */ + public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { - File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + // Writing an intermediate file to disk is inefficient, but since we're + // doing it anyway, we should use a lossless format. IM's internal MIFF + // is lossless like PNG and TIFF, but much faster. + File f2 = new File(f.getParentFile(), f.getName() + ".miff"); f2.deleteOnExit(); ConvertCmd cmd = new ConvertCmd(); IMOperation op = new IMOperation(); @@ -155,7 +163,7 @@ public File getImageFile(File f, int page, boolean verbose) op.define("pdf:use-cropbox=true"); } - String s = "[" + page + "]"; + String s = "[0]"; op.addImage(f.getAbsolutePath() + s); if (configurationService.getBooleanProperty(PRE + ".flatten", true)) { op.flatten(); @@ -208,20 +216,20 @@ public boolean preProcessBitstream(Context c, Item item, Bitstream source, boole if (description != null) { if (replaceRegex.matcher(description).matches()) { if (verbose) { - System.out.format("%s %s matches pattern and is replacable.%n", - description, nsrc); + System.out.format("%s %s matches pattern and is replaceable.%n", + description, n); } continue; } if (description.equals(getDescription())) { if (verbose) { System.out.format("%s %s is replaceable.%n", - getDescription(), nsrc); + getDescription(), n); } continue; } } - System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n", + System.out.format("Custom thumbnail exists for %s for item %s. Thumbnail will not be generated.%n", nsrc, item.getHandle()); return false; } diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java new file mode 100644 index 000000000000..4221a514d7d5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; + +import org.dspace.content.Item; +import org.im4java.core.ConvertCmd; +import org.im4java.core.IM4JavaException; +import org.im4java.core.IMOperation; + + +/** + * Filter video bitstreams, scaling the image to be within the bounds of + * thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be + * no bigger than. Creates only JPEGs. 
+ */ +public class ImageMagickVideoThumbnailFilter extends ImageMagickThumbnailFilter { + private static final int DEFAULT_WIDTH = 180; + private static final int DEFAULT_HEIGHT = 120; + private static final int FRAME_NUMBER = 100; + + /** + * @param currentItem item + * @param source source input stream + * @param verbose verbose mode + * @return InputStream the resulting input stream + * @throws Exception if error + */ + @Override + public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) + throws Exception { + File f = inputStreamToTempFile(source, "imthumb", ".tmp"); + File f2 = null; + try { + f2 = getThumbnailFile(f, verbose); + byte[] bytes = Files.readAllBytes(f2.toPath()); + return new ByteArrayInputStream(bytes); + } finally { + //noinspection ResultOfMethodCallIgnored + f.delete(); + if (f2 != null) { + //noinspection ResultOfMethodCallIgnored + f2.delete(); + } + } + } + + @Override + public File getThumbnailFile(File f, boolean verbose) + throws IOException, InterruptedException, IM4JavaException { + File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + f2.deleteOnExit(); + ConvertCmd cmd = new ConvertCmd(); + IMOperation op = new IMOperation(); + op.autoOrient(); + op.addImage("VIDEO:" + f.getAbsolutePath() + "[" + FRAME_NUMBER + "]"); + op.thumbnail(configurationService.getIntProperty("thumbnail.maxwidth", DEFAULT_WIDTH), + configurationService.getIntProperty("thumbnail.maxheight", DEFAULT_HEIGHT)); + op.addImage(f2.getAbsolutePath()); + if (verbose) { + System.out.println("IM Thumbnail Param: " + op); + } + cmd.run(op); + return f2; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java index 26347c56ee96..867e684db86b 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java @@ -7,25 +7,16 @@ */ package org.dspace.app.mediafilter; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class MediaFilterScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; @@ -36,16 +27,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java index 6b7f833e6dde..e2c6c9c5db06 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java @@ -8,13 +8,17 @@ package org.dspace.app.mediafilter; import java.io.InputStream; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.BitstreamFormat; @@ -315,25 +319,25 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo // check if destination bitstream exists Bundle existingBundle = null; - Bitstream existingBitstream = null; + List existingBitstreams = new ArrayList(); List bundles = itemService.getBundles(item, formatFilter.getBundleName()); if (bundles.size() > 0) { - // only finds the last match (FIXME?) + // only finds the last matching bundle and all matching bitstreams in the proper bundle(s) for (Bundle bundle : bundles) { List bitstreams = bundle.getBitstreams(); for (Bitstream bitstream : bitstreams) { if (bitstream.getName().trim().equals(newName.trim())) { existingBundle = bundle; - existingBitstream = bitstream; + existingBitstreams.add(bitstream); } } } } // if exists and overwrite = false, exit - if (!overWrite && (existingBitstream != null)) { + if (!overWrite && (existingBitstreams.size() > 0)) { if (!isQuiet) { logInfo("SKIPPED: bitstream " + source.getID() + " (item: " + item.getHandle() + ") because '" + newName + "' already exists"); @@ -388,18 +392,7 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo bitstreamService.update(context, b); //Set permissions on the derivative bitstream - //- First remove any existing policies - authorizeService.removeAllPolicies(context, b); - - //- Determine if this is a public-derivative format - if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) { - //- Set derivative bitstream to be publicly accessible - Group anonymous = groupService.findByName(context, Group.ANONYMOUS); - authorizeService.addPolicy(context, b, Constants.READ, anonymous); - } else { - //- replace the policies using the same in the source bitstream - authorizeService.replaceAllPolicies(context, source, b); - } + updatePoliciesOfDerivativeBitstream(context, b, formatFilter, source); //do post-processing of the generated bitstream formatFilter.postProcessBitstream(context, item, b); @@ -408,9 +401,8 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo logError("!!! OutOfMemoryError !!!"); } - // fixme - set date? 
// we are overwriting, so remove old bitstream - if (existingBitstream != null) { + for (Bitstream existingBitstream : existingBitstreams) { bundleService.removeBitstream(context, existingBundle, existingBitstream); } @@ -422,6 +414,71 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo return true; } + @Override + public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source) + throws SQLException, AuthorizeException { + + if (filterClasses == null) { + return; + } + + for (FormatFilter formatFilter : filterClasses) { + for (Bitstream bitstream : findDerivativeBitstreams(item, source, formatFilter)) { + updatePoliciesOfDerivativeBitstream(context, bitstream, formatFilter, source); + } + } + } + + /** + * find derivative bitstreams related to source bitstream + * + * @param item item containing bitstreams + * @param source source bitstream + * @param formatFilter formatFilter + * @return list of derivative bitstreams from source bitstream + * @throws SQLException If something goes wrong in the database + */ + private List findDerivativeBitstreams(Item item, Bitstream source, FormatFilter formatFilter) + throws SQLException { + + String bitstreamName = formatFilter.getFilteredName(source.getName()); + List bundles = itemService.getBundles(item, formatFilter.getBundleName()); + + return bundles.stream() + .flatMap(bundle -> + bundle.getBitstreams().stream()) + .filter(bitstream -> + StringUtils.equals(bitstream.getName().trim(), bitstreamName.trim())) + .collect(Collectors.toList()); + } + + /** + * update resource polices of derivative bitstreams. + * by remove all resource policies and + * set derivative bitstreams to be publicly accessible or + * replace derivative bitstreams policies using + * the same in the source bitstream. 
+ * + * @param context the context + * @param bitstream derivative bitstream + * @param formatFilter formatFilter + * @param source the source bitstream + * @throws SQLException If something goes wrong in the database + * @throws AuthorizeException if authorization error + */ + private void updatePoliciesOfDerivativeBitstream(Context context, Bitstream bitstream, FormatFilter formatFilter, + Bitstream source) throws SQLException, AuthorizeException { + + authorizeService.removeAllPolicies(context, bitstream); + + if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) { + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + authorizeService.addPolicy(context, bitstream, Constants.READ, anonymous); + } else { + authorizeService.replaceAllPolicies(context, source, bitstream); + } + } + @Override public Item getCurrentItem() { return currentItem; diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java index 50a6bb3a2027..bc92ff521098 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java @@ -7,10 +7,12 @@ */ package org.dspace.app.mediafilter.service; +import java.sql.SQLException; import java.util.List; import java.util.Map; import org.dspace.app.mediafilter.FormatFilter; +import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -91,6 +93,22 @@ public void applyFiltersCollection(Context context, Collection collection) public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter) throws Exception; + /** + * update resource polices of derivative bitstreams + * related to source bitstream. + * set derivative bitstreams to be publicly accessible or + * replace derivative bitstreams policies using + * the same in the source bitstream. + * + * @param context context + * @param item item containing bitstreams + * @param source source bitstream + * @throws SQLException If something goes wrong in the database + * @throws AuthorizeException if authorization error + */ + public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source) + throws SQLException, AuthorizeException; + /** * Return the item that is currently being processed/filtered * by the MediaFilterManager. 
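// Editorial sketch, not part of this patch: how a caller such as the bulk access control
// script can realign derivative bitstreams (thumbnails, extracted text) after changing the
// READ policies of a source bitstream, using the service method declared above.
// MediaFilterServiceFactory is assumed to be the usual way to obtain the service.
import java.sql.SQLException;

import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;

class DerivativePolicySyncSketch {

    void syncDerivatives(Context context, Item item, Bitstream source)
            throws SQLException, AuthorizeException {
        MediaFilterService mediaFilterService =
                MediaFilterServiceFactory.getInstance().getMediaFilterService();
        // removes all policies on each derivative, then either opens it to Anonymous
        // (public filter classes) or copies the source bitstream's policies
        mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, source);
    }
}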
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 384f33decaf2..6499c45a7830 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -11,55 +11,59 @@ import java.io.IOException; import java.sql.SQLException; import java.util.List; +import javax.annotation.ManagedBean; +import javax.inject.Inject; +import javax.inject.Singleton; import javax.mail.MessagingException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.dspace.app.requestitem.factory.RequestItemServiceFactory; import org.dspace.app.requestitem.service.RequestItemService; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamService; import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; -import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; /** * Send item requests and responses by email. * + *

    The "strategy" by which approvers are chosen is in an implementation of + * {@link RequestItemAuthorExtractor} which is injected by the name + * {@code requestItemAuthorExtractor}. See the DI configuration documents. + * * @author Mark H. Wood */ +@Singleton +@ManagedBean public class RequestItemEmailNotifier { private static final Logger LOG = LogManager.getLogger(); - private static final BitstreamService bitstreamService - = ContentServiceFactory.getInstance().getBitstreamService(); + @Inject + protected BitstreamService bitstreamService; - private static final ConfigurationService configurationService - = DSpaceServicesFactory.getInstance().getConfigurationService(); + @Inject + protected ConfigurationService configurationService; - private static final HandleService handleService - = HandleServiceFactory.getInstance().getHandleService(); + @Inject + protected HandleService handleService; - private static final RequestItemService requestItemService - = RequestItemServiceFactory.getInstance().getRequestItemService(); + @Inject + protected RequestItemService requestItemService; - private static final RequestItemAuthorExtractor requestItemAuthorExtractor - = DSpaceServicesFactory.getInstance() - .getServiceManager() - .getServiceByName("requestItemAuthorExtractor", - RequestItemAuthorExtractor.class); + protected final RequestItemAuthorExtractor requestItemAuthorExtractor; - private RequestItemEmailNotifier() {} + @Inject + public RequestItemEmailNotifier(RequestItemAuthorExtractor requestItemAuthorExtractor) { + this.requestItemAuthorExtractor = requestItemAuthorExtractor; + } /** * Send the request to the approver(s). @@ -70,7 +74,7 @@ private RequestItemEmailNotifier() {} * @throws IOException passed through. * @throws SQLException if the message was not sent. */ - static public void sendRequest(Context context, RequestItem ri, String responseLink) + public void sendRequest(Context context, RequestItem ri, String responseLink) throws IOException, SQLException { // Who is making this request? List authors = requestItemAuthorExtractor @@ -147,12 +151,38 @@ static public void sendRequest(Context context, RequestItem ri, String responseL * @param message email body (may be empty). * @throws IOException if sending failed. */ - static public void sendResponse(Context context, RequestItem ri, String subject, + public void sendResponse(Context context, RequestItem ri, String subject, String message) throws IOException { + // Who granted this request? + List grantors; + try { + grantors = requestItemAuthorExtractor.getRequestItemAuthor(context, ri.getItem()); + } catch (SQLException e) { + LOG.warn("Failed to get grantor's name and address: {}", e.getMessage()); + grantors = List.of(); + } + + String grantorName; + String grantorAddress; + if (grantors.isEmpty()) { + grantorName = configurationService.getProperty("mail.admin.name"); + grantorAddress = configurationService.getProperty("mail.admin"); + } else { + RequestItemAuthor grantor = grantors.get(0); // XXX Cannot know which one + grantorName = grantor.getFullName(); + grantorAddress = grantor.getEmail(); + } + // Build an email back to the requester. - Email email = new Email(); - email.setContent("body", message); + Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), + ri.isAccept_request() ? 
"request_item.granted" : "request_item.rejected")); + email.addArgument(ri.getReqName()); // {0} requestor's name + email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {1} URL of the requested Item + email.addArgument(ri.getItem().getName()); // {2} title of the requested Item + email.addArgument(grantorName); // {3} name of the grantor + email.addArgument(grantorAddress); // {4} email of the grantor + email.addArgument(message); // {5} grantor's optional message email.setSubject(subject); email.addRecipient(ri.getReqEmail()); // Attach bitstreams. @@ -167,17 +197,25 @@ static public void sendResponse(Context context, RequestItem ri, String subject, if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - email.addAttachment(bitstreamService.retrieve(context, - bitstream), bitstream.getName(), + // #8636 Anyone receiving the email can respond to the + // request without authenticating into DSpace + context.turnOffAuthorisationSystem(); + email.addAttachment( + bitstreamService.retrieve(context, bitstream), + bitstream.getName(), bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } } } } else { Bitstream bitstream = ri.getBitstream(); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, bitstream), bitstream.getName(), bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } email.send(); } else { @@ -207,7 +245,7 @@ static public void sendResponse(Context context, RequestItem ri, String subject, * @throws IOException if the message body cannot be loaded or the message * cannot be sent. */ - static public void requestOpenAccess(Context context, RequestItem ri) + public void requestOpenAccess(Context context, RequestItem ri) throws IOException { Email message = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "request_item.admin")); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java b/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java index 5886f16fde1a..fa7c15b23060 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java @@ -12,10 +12,15 @@ * e-mailed to a responsible party for consideration and action. Find details * in the user documentation under the rubric "Request a Copy". * - *

- * <p>This package includes several "strategy" classes which discover responsible
- * parties in various ways. See {@link RequestItemSubmitterStrategy} and the
- * classes which extend it. A strategy class must be configured and identified
- * as {@link RequestItemAuthorExtractor} for injection into code which requires
- * Request a Copy services.
+ * <p>Mailing is handled by {@link RequestItemEmailNotifier}. Responsible
+ * parties are represented by {@link RequestItemAuthor}
+ *
    This package includes several "strategy" classes which discover + * responsible parties in various ways. See + * {@link RequestItemSubmitterStrategy} and the classes which extend it, and + * others which implement {@link RequestItemAuthorExtractor}. A strategy class + * must be configured and identified as {@link requestItemAuthorExtractor} + * (note capitalization) for injection into code which requires Request + * a Copy services. */ package org.dspace.app.requestitem; diff --git a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java index b238ccf061f3..067c76cce8b3 100644 --- a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java @@ -8,7 +8,6 @@ package org.dspace.app.solrdatabaseresync; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -27,11 +26,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableCl this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - return true; - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java index aae042d0cf01..afd661ded1fb 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java @@ -11,6 +11,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Objects; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -145,6 +146,9 @@ public Optional getField(String fieldName) { } catch (DCInputsReaderException e) { log.error(e.getMessage(), e); } + } else if (field.isRelationshipField() && + ("relation." 
+ field.getRelationshipType()).equals(fieldName)) { + return Optional.of(field); } else { String fullName = field.getFieldName(); if (fullName.equals(fieldName)) { diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 8eb3a0674049..2f591b6e7a8c 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -7,8 +7,6 @@ */ package org.dspace.app.util; -import static org.dspace.content.Item.ANY; - import java.io.File; import java.sql.SQLException; import java.util.ArrayList; @@ -25,10 +23,12 @@ import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; import org.dspace.content.InProgressSubmission; +import org.dspace.content.Item; import org.dspace.content.edit.EditItem; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.factory.DSpaceServicesFactory; import org.w3c.dom.Document; @@ -111,6 +111,13 @@ public class SubmissionConfigReader { */ private SubmissionConfig lastSubmissionConfig = null; + /** + * Collection Service instance, needed to interact with collection's + * stored data + */ + protected static final CollectionService collectionService + = ContentServiceFactory.getInstance().getCollectionService(); + /** * Load Submission Configuration from the * item-submission.xml configuration file @@ -158,6 +165,9 @@ private void buildInputs(String fileName) throws SubmissionConfigReaderException } catch (FactoryConfigurationError fe) { throw new SubmissionConfigReaderException( "Cannot create Item Submission Configuration parser", fe); + } catch (SearchServiceException se) { + throw new SubmissionConfigReaderException( + "Cannot perform a discovery search for Item Submission Configuration", se); } catch (Exception e) { throw new SubmissionConfigReaderException( "Error creating Item Submission Configuration: " + e); @@ -229,8 +239,10 @@ public SubmissionConfig getSubmissionConfigByCollection(String collectionHandle) public SubmissionConfig getCorrectionSubmissionConfigByCollection(Collection collection) { CollectionService collService = ContentServiceFactory.getInstance().getCollectionService(); - String submitName = collService.getMetadataFirstValue(collection, - "cris", "submission", "definition-correction", ANY); + String submitName = + collService.getMetadataFirstValue( + collection, "cris", "submission", "definition-correction", Item.ANY + ); if (submitName != null) { SubmissionConfig subConfig = getSubmissionConfigByName(submitName); @@ -377,7 +389,7 @@ public SubmissionStepConfig getStepConfig(String stepID) * should correspond to the collection-form maps, the form definitions, and * the display/storage word pairs. */ - private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException { + private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException { if (n == null) { return; } @@ -424,18 +436,23 @@ private void doNodes(Node n) throws SAXException, SubmissionConfigReaderExceptio * the collection handle and item submission name, put name in hashmap keyed * by the collection handle. 
*/ - private void processMap(Node e) throws SAXException { + private void processMap(Node e) throws SAXException, SearchServiceException { + // create a context + Context context = new Context(); + NodeList nl = e.getChildNodes(); int len = nl.getLength(); for (int i = 0; i < len; i++) { Node nd = nl.item(i); if (nd.getNodeName().equals("name-map")) { String id = getAttribute(nd, "collection-handle"); + String entityType = getAttribute(nd, "collection-entity-type"); String value = getAttribute(nd, "submission-name"); String content = getValue(nd); - if (id == null) { + if (id == null && entityType == null) { throw new SAXException( - "name-map element is missing collection-handle attribute in 'item-submission.xml'"); + "name-map element is missing collection-handle or collection-entity-type attribute " + + "in 'item-submission.xml'"); } if (value == null) { throw new SAXException( @@ -445,7 +462,17 @@ private void processMap(Node e) throws SAXException { throw new SAXException( "name-map element has content in 'item-submission.xml', it should be empty."); } - collectionToSubmissionConfig.put(id, value); + if (id != null) { + collectionToSubmissionConfig.put(id, value); + + } else { + // get all collections for this entity-type + List collections = collectionService.findAllCollectionsByEntityType( context, + entityType); + for (Collection collection : collections) { + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + } + } } // ignore any child node that isn't a "name-map" } } @@ -739,4 +766,4 @@ public SubmissionConfig getSubmissionConfigByInProgressSubmission(InProgressSubm return getSubmissionConfigByCollection(object.getCollection()); } -} +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java index 8f155b63307d..c1402499c444 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java @@ -51,6 +51,7 @@ import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.core.I18nUtil; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; @@ -91,6 +92,7 @@ public class SyndicationFeed { // default DC fields for entry protected String defaultTitleField = "dc.title"; + protected String defaultDescriptionField = "dc.description"; protected String defaultAuthorField = "dc.contributor.author"; protected String defaultDateField = "dc.date.issued"; private static final String[] defaultDescriptionFields = @@ -196,15 +198,15 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec // dso is null for the whole site, or a search without scope if (dso == null) { defaultTitle = configurationService.getProperty("dspace.name"); - feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION)); + defaultDescriptionField = localize(labels, MSG_FEED_DESCRIPTION); objectURL = resolveURL(request, null); } else { Bitstream logo = null; if (dso instanceof IndexableCollection) { Collection col = ((IndexableCollection) dso).getIndexedObject(); defaultTitle = col.getName(); - feed.setDescription(collectionService.getMetadataFirstValue(col, - CollectionService.MD_SHORT_DESCRIPTION, Item.ANY)); + defaultDescriptionField = collectionService.getMetadataFirstValue(col, + 
CollectionService.MD_SHORT_DESCRIPTION, Item.ANY); logo = col.getLogo(); String cols = configurationService.getProperty("webui.feed.podcast.collections"); if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) { @@ -214,8 +216,8 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec } else if (dso instanceof IndexableCommunity) { Community comm = ((IndexableCommunity) dso).getIndexedObject(); defaultTitle = comm.getName(); - feed.setDescription(communityService.getMetadataFirstValue(comm, - CommunityService.MD_SHORT_DESCRIPTION, Item.ANY)); + defaultDescriptionField = communityService.getMetadataFirstValue(comm, + CommunityService.MD_SHORT_DESCRIPTION, Item.ANY); logo = comm.getLogo(); String comms = configurationService.getProperty("webui.feed.podcast.communities"); if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) { @@ -230,6 +232,12 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec } feed.setTitle(labels.containsKey(MSG_FEED_TITLE) ? localize(labels, MSG_FEED_TITLE) : defaultTitle); + + if (defaultDescriptionField == null || defaultDescriptionField == "") { + defaultDescriptionField = I18nUtil.getMessage("org.dspace.app.util.SyndicationFeed.no-description"); + } + + feed.setDescription(defaultDescriptionField); feed.setLink(objectURL); feed.setPublishedDate(new Date()); feed.setUri(objectURL); diff --git a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java index 9c37fcee4755..3b2366034489 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java @@ -52,11 +52,6 @@ public class IPAuthentication implements AuthenticationMethod { */ private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class); - /** - * Whether to look for x-forwarded headers for logging IP addresses - */ - protected static Boolean useProxies; - /** * All the IP matchers */ @@ -250,7 +245,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) log.debug(LogHelper.getHeader(context, "authenticated", "special_groups=" + gsb.toString() - + " (by IP=" + addr + ", useProxies=" + useProxies.toString() + ")" + + " (by IP=" + addr + ")" )); } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java index f3c6022e02c2..afd82db863ba 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java @@ -11,9 +11,11 @@ import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Hashtable; +import java.util.Iterator; import java.util.List; import javax.naming.NamingEnumeration; import javax.naming.NamingException; @@ -64,6 +66,7 @@ * @author Reuben Pasquini * @author Samuel Ottenhoff * @author Ivan Masár + * @author Michael Plate */ public class LDAPAuthentication implements AuthenticationMethod { @@ -391,7 +394,7 @@ private static class SpeakerToLDAP { protected String ldapGivenName = null; protected String ldapSurname = null; protected String ldapPhone = null; - protected String ldapGroup = null; + protected ArrayList ldapGroup = null; /** * LDAP settings @@ -406,9 +409,9 @@ 
private static class SpeakerToLDAP { final String ldap_surname_field; final String ldap_phone_field; final String ldap_group_field; - final boolean useTLS; + SpeakerToLDAP(Logger thelog) { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -547,7 +550,11 @@ protected String getDNOfUser(String adminUser, String adminPassword, Context con if (attlist[4] != null) { att = atts.get(attlist[4]); if (att != null) { - ldapGroup = (String) att.get(); + // loop through all groups returned by LDAP + ldapGroup = new ArrayList(); + for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) { + ldapGroup.add((String) val.next()); + } } } @@ -693,48 +700,69 @@ public String getName() { /* * Add authenticated users to the group defined in dspace.cfg by * the authentication-ldap.login.groupmap.* key. + * + * @param dn + * The string containing distinguished name of the user + * + * @param group + * List of strings with LDAP dn of groups + * + * @param context + * DSpace context */ - private void assignGroups(String dn, String group, Context context) { + private void assignGroups(String dn, ArrayList group, Context context) { if (StringUtils.isNotBlank(dn)) { System.out.println("dn:" + dn); int i = 1; String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i); - boolean cmp; + + // groupmap contains the mapping of LDAP groups to DSpace groups + // outer loop with the DSpace groups while (groupMap != null) { String t[] = groupMap.split(":"); String ldapSearchString = t[0]; String dspaceGroupName = t[1]; - if (group == null) { - cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); - } else { - cmp = StringUtils.equalsIgnoreCase(group, ldapSearchString); - } + // list of strings with dn from LDAP groups + // inner loop + Iterator groupIterator = group.iterator(); + while (groupIterator.hasNext()) { - if (cmp) { - // assign user to this group - try { - Group ldapGroup = groupService.findByName(context, dspaceGroupName); - if (ldapGroup != null) { - groupService.addMember(context, ldapGroup, context.getCurrentUser()); - groupService.update(context, ldapGroup); - } else { - // The group does not exist - log.warn(LogHelper.getHeader(context, - "ldap_assignGroupsBasedOnLdapDn", - "Group defined in authentication-ldap.login.groupmap." + i - + " does not exist :: " + dspaceGroupName)); + // save the current entry from iterator for further use + String currentGroup = groupIterator.next(); + + // very much the old code from DSpace <= 7.5 + if (currentGroup == null) { + cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); + } else { + cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString); + } + + if (cmp) { + // assign user to this group + try { + Group ldapGroup = groupService.findByName(context, dspaceGroupName); + if (ldapGroup != null) { + groupService.addMember(context, ldapGroup, context.getCurrentUser()); + groupService.update(context, ldapGroup); + } else { + // The group does not exist + log.warn(LogHelper.getHeader(context, + "ldap_assignGroupsBasedOnLdapDn", + "Group defined in authentication-ldap.login.groupmap." 
+ i + + " does not exist :: " + dspaceGroupName)); + } + } catch (AuthorizeException ae) { + log.debug(LogHelper.getHeader(context, + "assignGroupsBasedOnLdapDn could not authorize addition to " + + "group", + dspaceGroupName)); + } catch (SQLException e) { + log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", + dspaceGroupName)); } - } catch (AuthorizeException ae) { - log.debug(LogHelper.getHeader(context, - "assignGroupsBasedOnLdapDn could not authorize addition to " + - "group", - dspaceGroupName)); - } catch (SQLException e) { - log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", - dspaceGroupName)); } } diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 014de4671c8b..5eef69af7398 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -47,6 +47,7 @@ import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; @@ -676,60 +677,6 @@ public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group g } } - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. - * - * @param context The relevant DSpace Context. 
- * @param embargoDate embargo end date - * @param reason embargo reason - * @param dso DSpace object - * @param owningCollection collection to get group policies from - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - @Override - public void generateAutomaticPolicies(Context context, Date embargoDate, - String reason, DSpaceObject dso, Collection owningCollection) - throws SQLException, AuthorizeException { - - if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) { - - List authorizedGroups = getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); - - removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM); - - // look for anonymous - boolean isAnonymousInPlace = false; - for (Group g : authorizedGroups) { - if (StringUtils.equals(g.getName(), Group.ANONYMOUS)) { - isAnonymousInPlace = true; - } - } - if (!isAnonymousInPlace) { - // add policies for all the groups - for (Group g : authorizedGroups) { - ResourcePolicy rp = createOrModifyPolicy(null, context, null, g, null, embargoDate, Constants.READ, - reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - - } else { - // add policy just for anonymous - ResourcePolicy rp = createOrModifyPolicy(null, context, null, - groupService.findByName(context, Group.ANONYMOUS), null, - embargoDate, Constants.READ, reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - } - } - @Override public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException { @@ -831,6 +778,19 @@ public boolean isCollectionAdmin(Context context) throws SQLException { return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE); } + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. + * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + * @throws java.sql.SQLException passed through. + */ + @Override + public boolean isItemAdmin(Context context) throws SQLException { + return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE); + } + /** * Checks that the context's current user is a community or collection admin in the site. * diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java index 954bb9699038..c781400bae45 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java @@ -41,9 +41,16 @@ @Entity @Table(name = "resourcepolicy") public class ResourcePolicy implements ReloadableEntity { + /** This policy was set on submission, to give the submitter access. */ public static String TYPE_SUBMISSION = "TYPE_SUBMISSION"; + + /** This policy was set to allow access by a workflow group. */ public static String TYPE_WORKFLOW = "TYPE_WORKFLOW"; + + /** This policy was explicitly set on this object. */ public static String TYPE_CUSTOM = "TYPE_CUSTOM"; + + /** This policy was copied from the containing object's default policies. 
*/ public static String TYPE_INHERITED = "TYPE_INHERITED"; @Id @@ -93,7 +100,7 @@ public class ResourcePolicy implements ReloadableEntity { private String rptype; @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "rpdescription") private String rpdescription; diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java index d707bf200b4e..87bf459bcbeb 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java +++ b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java @@ -40,6 +40,9 @@ public List findByDsoAndType(Context context, DSpaceObject dSpac public List findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException; + public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dSpaceObject, String type, int action) + throws SQLException; + public List findByDSoAndActionAndType(Context c, DSpaceObject o, int actionId, String type) throws SQLException; @@ -64,9 +67,6 @@ public List findByEPersonGroupTypeIdAction(Context context, EPer public void deleteByDsoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException; - public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dSpaceObject, String type, int action) - throws SQLException; - public void deleteByDsoAndType(Context context, DSpaceObject dSpaceObject, String type) throws SQLException; public void deleteByGroup(Context context, Group group) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java index ee79933361d0..3c002459ff18 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java @@ -125,6 +125,19 @@ public List findByDSoAndActionAndType(Context context, DSpaceObj return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1); } + @Override + public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dso, String type, int actionId) + throws SQLException { + String queryString = "delete from ResourcePolicy where dSpaceObject.id = :dsoId " + + "AND rptype = :rptype AND actionId= :actionId"; + Query query = createQuery(context, queryString); + query.setParameter("dsoId", dso.getID()); + query.setParameter("rptype", type); + query.setParameter("actionId", actionId); + query.executeUpdate(); + + } + @Override public List findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException { @@ -203,19 +216,6 @@ public void deleteByDsoAndType(Context context, DSpaceObject dso, String type) t query.executeUpdate(); } - @Override - public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dso, String type, int actionId) - throws SQLException { - String queryString = "delete from ResourcePolicy where dSpaceObject.id = :dsoId " - + "AND rptype = :rptype AND actionId= :actionId"; - Query query = createQuery(context, queryString); - query.setParameter("dsoId", dso.getID()); - query.setParameter("rptype", type); - query.setParameter("actionId", actionId); - query.executeUpdate(); - - } - @Override public void deleteByGroup(Context context, Group group) throws SQLException { String queryString = "delete from 
ResourcePolicy where epersonGroup= :epersonGroup"; diff --git a/dspace-api/src/main/java/org/dspace/authorize/package-info.java b/dspace-api/src/main/java/org/dspace/authorize/package-info.java new file mode 100644 index 000000000000..f36c39cfe351 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/package-info.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +/** + * Represents permissions for access to DSpace content. + * + *

+ * <h2>Philosophy</h2>
+ * DSpace's authorization system follows the classical "police state"
+ * philosophy of security - the user can do nothing, unless it is
+ * specifically allowed. Those permissions are spelled out with
+ * {@link ResourcePolicy} objects, stored in the {@code resourcepolicy} table
+ * in the database.
+ *
+ * <h2>Policies are attached to Content</h2>
+ * Resource Policies get assigned to all of the content objects in
+ * DSpace - collections, communities, items, bundles, and bitstreams.
+ * (Currently they are not attached to non-content objects such as
+ * {@code EPerson} or {@code Group}. But they could be, hence the name
+ * {@code ResourcePolicy} instead of {@code ContentPolicy}.)
+ *
+ * <h2>Policies are tuples</h2>
+ * Authorization is based on evaluating the tuple of (object, action, actor),
+ * such as (ITEM, READ, EPerson John Smith) to check if the {@code EPerson}
+ * "John Smith" can read an item. {@code ResourcePolicy} objects are pretty
+ * simple, describing a single instance of (object, action, actor). If
+ * multiple actors are desired, such as groups 10, 11, and 12 are allowed to
+ * READ Item 13, you simply create a {@code ResourcePolicy} for each group.
+ *
+ * <h2>Built-in groups</h2>
+ * The install process should create two built-in groups - {@code Anonymous}
+ * for anonymous/public access, and {@code Administrators} for administrators.
+ * Group {@code Anonymous} allows anyone access, even if not authenticated.
+ * Group {@code Administrators}' members have super-user rights,
+ * and are allowed to do any action to any object.
+ *
+ * <h2>Policy types</h2>
+ * Policies have a "type" used to distinguish policies which are applied for
+ * specific purposes.
+ * <dl>
+ *   <dt>CUSTOM</dt>
+ *   <dd>These are created and assigned explicitly by users.</dd>
+ *   <dt>INHERITED</dt>
+ *   <dd>These are copied from a containing object's default policies.</dd>
+ *   <dt>SUBMISSION</dt>
+ *   <dd>These are applied during submission to give the submitter access while
+ *       composing a submission.</dd>
+ *   <dt>WORKFLOW</dt>
+ *   <dd>These are automatically applied during workflow, to give curators
+ *       access to submissions in their curation queues. They usually have an
+ *       automatically-created workflow group as the actor.</dd>
+ * </dl>
+ *
+ * <h2>Start and End dates</h2>
    + * A policy may have a start date and/or an end date. The policy is + * considered not valid before the start date or after the end date. No date + * means do not apply the related test. For example, embargo until a given + * date can be expressed by a READ policy with a given start date, and a + * limited-time offer by a READ policy with a given end date. + * + * @author dstuve + * @author mwood + */ +package org.dspace.authorize; diff --git a/dspace-api/src/main/java/org/dspace/authorize/package.html b/dspace-api/src/main/java/org/dspace/authorize/package.html deleted file mode 100644 index 66ce0f824773..000000000000 --- a/dspace-api/src/main/java/org/dspace/authorize/package.html +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - -

-Handles permissions for DSpace content.
-
-<h2>Philosophy</h2>
-DSpace's authorization system follows the classical "police state"
-philosophy of security - the user can do nothing, unless it is
-specifically allowed. Those permissions are spelled out with
-ResourcePolicy objects, stored in the resourcepolicy table in the
-database.
-
-<h2>Policies are attached to Content</h2>
-Resource Policies get assigned to all of the content objects in
-DSpace - collections, communities, items, bundles, and bitstreams.
-(Currently they are not attached to non-content objects such as EPerson
-or Group. But they could be, hence the name ResourcePolicy instead of
-ContentPolicy.)
-
-<h2>Policies are tuples</h2>
-Authorization is based on evaluating the tuple of (object, action, who),
-such as (ITEM, READ, EPerson John Smith) to check if the EPerson "John Smith"
-can read an item. ResourcePolicy objects are pretty simple, describing a single instance of
-(object, action, who). If multiple who's are desired, such as Groups 10, 11, and
-12 are allowed to READ Item 13, you simply create a ResourcePolicy for each
-group.
-
-<h2>Special Groups</h2>
-The install process should create two special groups - group 0, for
-anonymous/public access, and group 1 for administrators.
-Group 0 (public/anonymous) allows anyone access, even if they are not
-authenticated. Group 1's (admin) members have super-user rights, and
-are allowed to do any action to any object.
-
-<h2>Unused ResourcePolicy attributes</h2>
-ResourcePolicies have a few attributes that are currently unused,
-but are included with the intent that they will be used someday.
-One is start and end dates, for when policies will be active, so that
-permissions for content can change over time. The other is the EPerson -
-policies could apply to only a single EPerson, but for ease of
-administration currently a Group is the recommended unit to use to
-describe 'who'.
-
    - - - diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 43ae51544c9b..3db676d88b2b 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -489,24 +489,6 @@ public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject o, Grou public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action) throws SQLException; - - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. - * - * @param context current context - * @param embargoDate date - * @param reason reason - * @param dso DSpaceObject - * @param owningCollection collection - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - public void generateAutomaticPolicies(Context context, Date embargoDate, String reason, DSpaceObject dso, - Collection owningCollection) throws SQLException, AuthorizeException; - public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException; @@ -551,6 +533,15 @@ void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int */ boolean isCollectionAdmin(Context context) throws SQLException; + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. + * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + */ + boolean isItemAdmin(Context context) throws SQLException; + /** * Checks that the context's current user is a community or collection admin in the site. * @@ -646,7 +637,7 @@ long countAdminAuthorizedCollection(Context context, String query) /** * Replace all the policies in the target object with exactly the same policies that exist in the source object - * + * * @param context DSpace Context * @param source source of policies * @param dest destination of inherited policies diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java index beb3c34662df..662b14b18b2e 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java @@ -56,12 +56,19 @@ public List find(Context c, EPerson e, List groups, int a throws SQLException; /** - * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID. - * This method can be used to detect duplicate ResourcePolicies. + * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring + * IDs with a specific PolicyID. This method can be used to detect duplicate + * ResourcePolicies. * - * @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies. - * @return List of resource policies for the same DSpaceObject, group and action but other policyID. 
- * @throws SQLException + * @param context current DSpace session. + * @param dso find policies for this object. + * @param group find policies referring to this group. + * @param action find policies for this action. + * @param notPolicyID ResourcePolicies with this ID will be ignored while + * looking out for equal ResourcePolicies. + * @return List of resource policies for the same DSpaceObject, group and + * action but other policyID. + * @throws SQLException passed through. */ public List findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) @@ -71,6 +78,16 @@ public List findByTypeGroupActionExceptId(Context context, DSpac public boolean isDateValid(ResourcePolicy resourcePolicy); + /** + * Create and persist a copy of a given ResourcePolicy, with an empty + * dSpaceObject field. + * + * @param context current DSpace session. + * @param resourcePolicy the policy to be copied. + * @return the copy. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + */ public ResourcePolicy clone(Context context, ResourcePolicy resourcePolicy) throws SQLException, AuthorizeException; public void removeAllPolicies(Context c, DSpaceObject o) throws SQLException, AuthorizeException; @@ -123,6 +140,7 @@ public List findExceptRpType(Context c, DSpaceObject o, int acti * @param ePerson ePerson whose policies want to find * @param offset the position of the first result to return * @param limit paging limit + * @return some of the policies referring to {@code ePerson}. * @throws SQLException if database error */ public List findByEPerson(Context context, EPerson ePerson, int offset, int limit) diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java index 14e439d5908c..5d5f2ccb755e 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java @@ -22,11 +22,13 @@ * This class holds all the information about a specifically configured * BrowseIndex. It is responsible for parsing the configuration, understanding * about what sort options are available, and what the names of the database - * tables that hold all the information are actually called. + * tables that hold all the information are actually called. Hierarchical browse + * indexes also contain information about the vocabulary they're using, see: + * {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} * * @author Richard Jones */ -public final class BrowseIndex { +public class BrowseIndex { /** the configuration number, as specified in the config */ /** * used for single metadata browse tables for generating the table name @@ -99,10 +101,10 @@ private BrowseIndex() { /** * Constructor for creating generic / internal index objects - * + * * @param baseName The base of the table name */ - private BrowseIndex(String baseName) { + protected BrowseIndex(String baseName) { this(baseName, "item"); } @@ -735,7 +737,7 @@ public static BrowseIndex getBrowseIndex(SortOption so) throws BrowseException { /** * Get the internally defined browse index for archived items. 
- * + * * @param displayType * * @return browse index diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java index c9c140fb0b5b..20c43fc37298 100644 --- a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java +++ b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java @@ -18,6 +18,7 @@ import org.dspace.core.Context; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.web.ContextUtil; /** * This class provides a standard interface to all item counting @@ -49,9 +50,20 @@ public class ItemCounter { */ private Context context; + /** + * This field is used to hold singular instance of a class. + * Singleton pattern is used but this class should be + * refactored to modern DSpace approach (injectible service). + */ + + private static ItemCounter instance; + protected ItemService itemService; protected ConfigurationService configurationService; + private boolean showStrengths; + private boolean useCache; + /** * Construct a new item counter which will use the given DSpace Context * @@ -63,21 +75,42 @@ public ItemCounter(Context context) throws ItemCountException { this.dao = ItemCountDAOFactory.getInstance(this.context); this.itemService = ContentServiceFactory.getInstance().getItemService(); this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.showStrengths = configurationService.getBooleanProperty("webui.strengths.show", false); + this.useCache = configurationService.getBooleanProperty("webui.strengths.cache", true); } /** - * Get the count of the items in the given container. If the configuration - * value webui.strengths.cache is equal to 'true' this will return the - * cached value if it exists. If it is equal to 'false' it will count - * the number of items in the container in real time. + * Get the singular instance of a class. + * It creates a new instance at the first usage of this method. + * + * @return instance af a class + * @throws ItemCountException when error occurs + */ + public static ItemCounter getInstance() throws ItemCountException { + if (instance == null) { + instance = new ItemCounter(ContextUtil.obtainCurrentRequestContext()); + } + return instance; + } + + /** + * Get the count of the items in the given container. If the configuration + * value webui.strengths.show is equal to 'true' this method will return all + * archived items. If the configuration value webui.strengths.show is equal to + * 'false' this method will return -1. + * If the configuration value webui.strengths.cache + * is equal to 'true' this will return the cached value if it exists. + * If it is equal to 'false' it will count the number of items + * in the container in real time. 
* * @param dso DSpaceObject * @return count * @throws ItemCountException when error occurs */ public int getCount(DSpaceObject dso) throws ItemCountException { - boolean useCache = configurationService.getBooleanProperty( - "webui.strengths.cache", true); + if (!showStrengths) { + return -1; + } if (useCache) { return dao.getCount(dso); diff --git a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java index 0194be59f3a7..3676133a89f9 100644 --- a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java @@ -264,7 +264,7 @@ private void addLocationScopeFilter(DiscoverQuery query) { } private void addDefaultFilterQueries(DiscoverQuery query) { - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container); discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries); } diff --git a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java index 6b16d51bfe1e..a12ac3b98a2e 100644 --- a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java +++ b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java @@ -245,7 +245,7 @@ protected void processBitstream(MostRecentChecksum info) throws SQLException { info.setProcessStartDate(new Date()); try { - Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream()); + Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream()); if (MapUtils.isNotEmpty(checksumMap)) { info.setBitstreamFound(true); if (checksumMap.containsKey("checksum")) { @@ -255,10 +255,16 @@ protected void processBitstream(MostRecentChecksum info) throws SQLException { if (checksumMap.containsKey("checksum_algorithm")) { info.setChecksumAlgorithm(checksumMap.get("checksum_algorithm").toString()); } + + // compare new checksum to previous checksum + info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum())); + + } else { + info.setCurrentChecksum(""); + info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND)); + info.setToBeProcessed(false); } - // compare new checksum to previous checksum - info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum())); } catch (IOException e) { // bitstream located, but file missing from asset store info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND)); diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 485f1d645130..20c43e4bfc73 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -8,6 +8,7 @@ package org.dspace.content; import static org.dspace.core.Constants.ADD; +import static org.dspace.core.Constants.READ; import static org.dspace.core.Constants.REMOVE; import static org.dspace.core.Constants.WRITE; @@ -34,6 +35,7 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; +import org.dspace.eperson.Group; import org.dspace.event.Event; import org.springframework.beans.factory.annotation.Autowired; @@ -74,14 +76,14 @@ 
public Bundle find(Context context, UUID id) throws SQLException { if (bundle == null) { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "not_found,bundle_id=" + id)); + "not_found,bundle_id=" + id)); } return null; } else { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "bundle_id=" + id)); + "bundle_id=" + id)); } return bundle; @@ -106,7 +108,7 @@ public Bundle create(Context context, Item item, String name) throws SQLExceptio log.info(LogHelper.getHeader(context, "create_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); // if we ever use the identifier service for bundles, we should // create the bundle before we create the Event and should add all @@ -132,12 +134,12 @@ public Bitstream getBitstreamByName(Bundle bundle, String name) { @Override public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.ADD); log.info(LogHelper.getHeader(context, "add_bitstream", "bundle_id=" - + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + + bundle.getID() + ",bitstream_id=" + bitstream.getID())); // First check that the bitstream isn't already in the list List bitstreams = bundle.getBitstreams(); @@ -167,28 +169,61 @@ public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) context.addEvent(new Event(Event.ADD, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... authorizeService.inheritPolicies(context, bundle, bitstream); + // The next logic is a bit overly cautious but ensures that if there are any future start dates + // on the item or bitstream read policies, that we'll skip inheriting anything from the owning collection + // just in case. In practice, the item install process would overwrite these anyway but it may satisfy + // some other bitstream creation methods and integration tests + boolean isEmbargoed = false; + for (ResourcePolicy resourcePolicy : authorizeService.getPoliciesActionFilter(context, owningItem, READ)) { + if (!resourcePolicyService.isDateValid(resourcePolicy)) { + isEmbargoed = true; + break; + } + } + if (owningItem != null && !isEmbargoed) { + // Resolve owning collection + Collection owningCollection = owningItem.getOwningCollection(); + if (owningCollection != null) { + // Get DEFAULT_BITSTREAM_READ policy from the collection + List defaultBitstreamReadGroups = + authorizeService.getAuthorizedGroups(context, owningCollection, + Constants.DEFAULT_BITSTREAM_READ); + log.info(defaultBitstreamReadGroups.size()); + // If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy + // inherited from the bundle with this policy. 
+ if (!defaultBitstreamReadGroups.isEmpty()) { + // Remove read policies from the bitstream + authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ); + for (Group defaultBitstreamReadGroup : defaultBitstreamReadGroups) { + // Inherit this policy as READ, directly from the collection roles + authorizeService.addPolicy(context, bitstream, + Constants.READ, defaultBitstreamReadGroup, ResourcePolicy.TYPE_INHERITED); + } + } + } + } bitstreamService.update(context, bitstream); } @Override public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws AuthorizeException, SQLException, IOException { + throws AuthorizeException, SQLException, IOException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.REMOVE); log.info(LogHelper.getHeader(context, "remove_bitstream", - "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); context.addEvent(new Event(Event.REMOVE, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); //Ensure that the last modified from the item is triggered ! Item owningItem = (Item) getParentObject(context, bundle); @@ -221,9 +256,9 @@ public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) @Override public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Collection collection) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List policies = authorizeService.getPoliciesActionFilter(context, collection, - Constants.DEFAULT_BITSTREAM_READ); + Constants.DEFAULT_BITSTREAM_READ); // change the action to just READ // just don't call update on the resourcepolicies!!! @@ -231,7 +266,7 @@ public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Col if (!i.hasNext()) { throw new java.sql.SQLException("Collection " + collection.getID() - + " has no default bitstream READ policies"); + + " has no default bitstream READ policies"); } List newPolicies = new ArrayList(); @@ -246,7 +281,7 @@ public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Col @Override public void replaceAllBitstreamPolicies(Context context, Bundle bundle, List newpolicies) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List bitstreams = bundle.getBitstreams(); if (CollectionUtils.isNotEmpty(bitstreams)) { for (Bitstream bs : bitstreams) { @@ -368,16 +403,16 @@ public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws if (bitstream == null) { //This should never occur but just in case log.warn(LogHelper.getHeader(context, "Invalid bitstream id while changing bitstream order", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); continue; } // If we have a Bitstream not in the current list, log a warning & exit immediately if (!currentBitstreams.contains(bitstream)) { log.warn(LogHelper.getHeader(context, - "Encountered a bitstream not in this bundle while changing bitstream " + - "order. 
Bitstream order will not be changed.", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Encountered a bitstream not in this bundle while changing bitstream " + + "order. Bitstream order will not be changed.", + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); return; } updatedBitstreams.add(bitstream); @@ -386,9 +421,9 @@ public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws // If our lists are different sizes, exit immediately if (updatedBitstreams.size() != currentBitstreams.size()) { log.warn(LogHelper.getHeader(context, - "Size of old list and new list do not match. Bitstream order will not be " + - "changed.", - "Bundle: " + bundle.getID())); + "Size of old list and new list do not match. Bitstream order will not be " + + "changed.", + "Bundle: " + bundle.getID())); return; } @@ -434,7 +469,7 @@ public DSpaceObject getAdminObject(Context context, Bundle bundle, int action) t } else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamDeletion()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamDeletion()) { + .canCommunityAdminPerformBitstreamDeletion()) { adminObject = community; } break; @@ -442,10 +477,10 @@ public DSpaceObject getAdminObject(Context context, Bundle bundle, int action) t if (AuthorizeConfiguration.canItemAdminPerformBitstreamCreation()) { adminObject = item; } else if (AuthorizeConfiguration - .canCollectionAdminPerformBitstreamCreation()) { + .canCollectionAdminPerformBitstreamCreation()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamCreation()) { + .canCommunityAdminPerformBitstreamCreation()) { adminObject = community; } break; @@ -477,7 +512,7 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz // Check authorisation //AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE); log.info(LogHelper.getHeader(context, "update_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); super.update(context, bundle); bundleDAO.save(context, bundle); @@ -485,10 +520,10 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz if (bundle.isModified() || bundle.isMetadataModified()) { if (bundle.isMetadataModified()) { context.addEvent(new Event(Event.MODIFY_METADATA, bundle.getType(), bundle.getID(), bundle.getDetails(), - getIdentifiers(context, bundle))); + getIdentifiers(context, bundle))); } context.addEvent(new Event(Event.MODIFY, Constants.BUNDLE, bundle.getID(), - null, getIdentifiers(context, bundle))); + null, getIdentifiers(context, bundle))); bundle.clearModified(); bundle.clearDetails(); } @@ -497,12 +532,12 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz @Override public void delete(Context context, Bundle bundle) throws SQLException, AuthorizeException, IOException { log.info(LogHelper.getHeader(context, "delete_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); authorizeService.authorizeAction(context, bundle, Constants.DELETE); context.addEvent(new Event(Event.DELETE, Constants.BUNDLE, bundle.getID(), - bundle.getName(), getIdentifiers(context, bundle))); + bundle.getName(), getIdentifiers(context, bundle))); // Remove bitstreams List bitstreams = bundle.getBitstreams(); diff --git a/dspace-api/src/main/java/org/dspace/content/Collection.java b/dspace-api/src/main/java/org/dspace/content/Collection.java index 7dadde72c90a..dbe2d35efe1e 100644 --- 
a/dspace-api/src/main/java/org/dspace/content/Collection.java +++ b/dspace-api/src/main/java/org/dspace/content/Collection.java @@ -29,6 +29,7 @@ import javax.persistence.Transient; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.comparator.NameAscendingComparator; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; @@ -341,4 +342,17 @@ private CollectionService getCollectionService() { return collectionService; } + /** + * return count of the collection items + * + * @return int + */ + public int countArchivedItems() { + try { + return collectionService.countArchivedItems(this); + } catch (ItemCountException e) { + throw new RuntimeException(e); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index 367c7a5d34b1..36523e7cf946 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -35,6 +35,8 @@ import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.browse.ItemCountException; +import org.dspace.browse.ItemCounter; import org.dspace.content.dao.CollectionDAO; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.CollectionService; @@ -1217,4 +1219,35 @@ public int countCollectionsAdministeredByEntityType(String query, String entityT discoverQuery, query, entityType).getTotalSearchResults(); } + @Override + @SuppressWarnings("rawtypes") + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException { + List collectionList = new ArrayList<>(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + entityType); + + DiscoverResult discoverResult = searchService.search(context, discoverQuery); + List solrIndexableObjects = discoverResult.getIndexableObjects(); + + for (IndexableObject solrCollection : solrIndexableObjects) { + Collection c = ((IndexableCollection) solrCollection).getIndexedObject(); + collectionList.add(c); + } + return collectionList; + } + + /** + * Returns total collection archived items + * + * @param collection Collection + * @return total collection archived items + * @throws ItemCountException + */ + @Override + public int countArchivedItems(Collection collection) throws ItemCountException { + return ItemCounter.getInstance().getCount(collection); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/Community.java b/dspace-api/src/main/java/org/dspace/content/Community.java index fa99da33091a..dd6d978936df 100644 --- a/dspace-api/src/main/java/org/dspace/content/Community.java +++ b/dspace-api/src/main/java/org/dspace/content/Community.java @@ -25,6 +25,7 @@ import javax.persistence.Transient; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.dspace.browse.ItemCountException; import org.dspace.content.comparator.NameAscendingComparator; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CommunityService; @@ -264,4 +265,16 @@ private CommunityService getCommunityService() { return communityService; } + /** + * return count of the 
community items + * + * @return int + */ + public int countArchivedItems() { + try { + return communityService.countArchivedItems(this); + } catch (ItemCountException e) { + throw new RuntimeException(e); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java index b4053a724f32..821c86dcb228 100644 --- a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java @@ -25,6 +25,8 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; +import org.dspace.browse.ItemCountException; +import org.dspace.browse.ItemCounter; import org.dspace.content.dao.CommunityDAO; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.CollectionService; @@ -82,7 +84,6 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl imp protected CommunityServiceImpl() { super(); - } @Override @@ -712,4 +713,16 @@ public Community findByLegacyId(Context context, int id) throws SQLException { public int countTotal(Context context) throws SQLException { return communityDAO.countRows(context); } + + /** + * Returns total community archived items + * + * @param community Community + * @return total community archived items + * @throws ItemCountException + */ + @Override + public int countArchivedItems(Community community) throws ItemCountException { + return ItemCounter.getInstance().getCount(community); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 3ad03377cb27..6935203e2356 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -79,7 +79,9 @@ import org.dspace.event.Event; import org.dspace.harvest.HarvestedItem; import org.dspace.harvest.service.HarvestedItemService; +import org.dspace.identifier.DOI; import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.service.DOIService; import org.dspace.identifier.service.IdentifierService; import org.dspace.layout.CrisLayoutBox; import org.dspace.layout.CrisLayoutField; @@ -146,6 +148,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) protected IdentifierService identifierService; @Autowired(required = true) + protected DOIService doiService; + @Autowired(required = true) protected VersioningService versioningService; @Autowired(required = true) protected HarvestedItemService harvestedItemService; @@ -962,6 +966,16 @@ protected void rawDelete(Context context, Item item) throws AuthorizeException, // Remove any Handle handleService.unbindHandle(context, item); + // Delete a DOI if linked to the item. + // If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid, + // hibernate will throw a foreign constraint exception. + // Here we use the DOI service directly as it is able to manage DOIs even without any configured + // consumer or provider. 
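The DOI detachment described in the comment above is applied in the lines that follow. Separately, the countArchivedItems() helpers added earlier in this hunk series for Collection and Community are thin convenience wrappers that delegate to ItemCounter.getInstance().getCount(...) and surface an ItemCountException as a RuntimeException. A hedged usage sketch, assuming an open Context, an injected CommunityService and a known community UUID (communityUuid and the printed labels are illustrative names only):

    Community community = communityService.find(context, communityUuid);
    for (Collection collection : community.getCollections()) {
        // per-collection count of archived items, computed via ItemCounter
        System.out.println(collection.getName() + ": " + collection.countArchivedItems());
    }
    // aggregate count for the community itself
    System.out.println("Total: " + community.countArchivedItems());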
+ DOI doi = doiService.findDOIByDSpaceObject(context, item); + if (doi != null) { + doi.setDSpaceObject(null); + } + // remove version attached to the item removeVersion(context, item); @@ -1196,16 +1210,23 @@ public void inheritCollectionDefaultPolicies(Context context, Item item, Collect @Override public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { - List defaultCollectionPolicies = authorizeService - .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); + // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files + // can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other + // policies or embargos applied + List defaultCollectionBundlePolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); + // Bitstreams should inherit from DEFAULT_BITSTREAM_READ + List defaultCollectionBitstreamPolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); List defaultItemPolicies = authorizeService.findPoliciesByDSOAndType(context, item, ResourcePolicy.TYPE_CUSTOM); - if (defaultCollectionPolicies.size() < 1) { + if (defaultCollectionBitstreamPolicies.size() < 1) { throw new SQLException("Collection " + collection.getID() + " (" + collection.getHandle() + ")" + " has no default bitstream READ policies"); } + // TODO: should we also throw an exception if no DEFAULT_ITEM_READ? // remove all policies from bundles, add new ones // Remove bundles @@ -1216,18 +1237,44 @@ public void adjustBundleBitstreamPolicies(Context context, Item item, Collection authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION); authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_WORKFLOW); addCustomPoliciesNotInPlace(context, mybundle, defaultItemPolicies); - addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionPolicies); + addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies); for (Bitstream bitstream : mybundle.getBitstreams()) { // if come from InstallItem: remove all submission/workflow policies - authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_SUBMISSION); - authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW); - addCustomPoliciesNotInPlace(context, bitstream, defaultItemPolicies); - addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies); + removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, + defaultCollectionBitstreamPolicies); } } } + @Override + public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream) + throws SQLException, AuthorizeException { + List defaultCollectionPolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); + + List defaultItemPolicies = authorizeService.findPoliciesByDSOAndType(context, item, + ResourcePolicy.TYPE_CUSTOM); + if (defaultCollectionPolicies.size() < 1) { + throw new SQLException("Collection " + collection.getID() + + " (" + collection.getHandle() + ")" + + " has no default bitstream READ policies"); + } + + // remove all policies from bitstream, add new ones + removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, defaultCollectionPolicies); + } + + private 
void removeAllPoliciesAndAddDefault(Context context, Bitstream bitstream, + List defaultItemPolicies, + List defaultCollectionPolicies) + throws SQLException, AuthorizeException { + authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_SUBMISSION); + authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW); + addCustomPoliciesNotInPlace(context, bitstream, defaultItemPolicies); + addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies); + } + @Override public void adjustItemPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { @@ -1444,9 +1491,18 @@ public boolean isInProgressSubmission(Context context, Item item) throws SQLExce */ - @Override + /** + * Add the default policies, which have not been already added to the given DSpace object + * + * @param context The relevant DSpace Context. + * @param dso The DSpace Object to add policies to + * @param defaultCollectionPolicies list of policies + * @throws SQLException An exception that provides information on a database access error or other errors. + * @throws AuthorizeException Exception indicating the current user of the context does not have permission + * to perform a particular action. + */ public void addDefaultPoliciesNotInPlace(Context context, DSpaceObject dso, - List defaultCollectionPolicies) throws SQLException, AuthorizeException { + List defaultCollectionPolicies) throws SQLException, AuthorizeException { boolean appendMode = configurationService .getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode", false); for (ResourcePolicy defaultPolicy : defaultCollectionPolicies) { diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java index fa45ed15e007..7babfce3145b 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java @@ -18,7 +18,8 @@ public enum MetadataSchemaEnum { EPERSON("eperson"), RELATION("relation"), CRIS("cris"), - OAIRECERIF("oairecerif"); + OAIRECERIF("oairecerif"), + PERSON("person"); /** * The String representation of the MetadataSchemaEnum diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java index 639cec0e0c30..923b5575fa46 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java @@ -61,7 +61,7 @@ public class MetadataValue implements ReloadableEntity { * The value of the field */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "text_value") private String value; diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java index daf9a34378ac..9e0c72258e56 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java @@ -110,7 +110,8 @@ protected List findLatestForDiscoveryMetadataValues( // on the left item as a storage/performance improvement. 
// As a consequence, when searching for related items (using discovery) // on the pages of the right items you won't be able to find the left item. - if (relationshipType.getTilted() != RIGHT && itemEntityType.equals(relationshipType.getLeftType())) { + if (relationshipType.getTilted() != RIGHT + && Objects.equals(relationshipType.getLeftType(), itemEntityType)) { String element = relationshipType.getLeftwardType(); List data = relationshipService .findByLatestItemAndRelationshipType(context, item, relationshipType, true); diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index cbc92c3be5f6..b0b84a0d2dcf 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -22,7 +22,6 @@ import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.util.DCInput; import org.dspace.app.util.DCInputSet; @@ -39,6 +38,8 @@ import org.dspace.core.Constants; import org.dspace.core.Utils; import org.dspace.core.service.PluginService; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; import org.dspace.services.ConfigurationService; import org.dspace.submit.model.UploadConfiguration; import org.dspace.submit.model.UploadConfigurationService; @@ -66,7 +67,7 @@ * @see ChoiceAuthority */ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService { - private Logger log = LogManager.getLogger(ChoiceAuthorityServiceImpl.class); + private Logger log = org.apache.logging.log4j.LogManager.getLogger(ChoiceAuthorityServiceImpl.class); // map of field key to authority plugin protected Map controller = new HashMap(); @@ -95,6 +96,9 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected Map>> authoritiesFormDefinitions = new HashMap>>(); + // Map of vocabulary authorities to and their index info equivalent + protected Map vocabularyIndexMap = new HashMap<>(); + // the item submission reader private SubmissionConfigReader itemSubmissionConfigReader; @@ -108,6 +112,8 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected AuthorityServiceUtils authorityServiceUtils; @Autowired(required = true) protected ItemService itemService; + @Autowired + private DiscoveryConfigurationService searchConfigurationService; final static String CHOICES_PLUGIN_PREFIX = "choices.plugin."; final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; @@ -701,4 +707,50 @@ private boolean isLinkableToAnEntityWithEntityType(ChoiceAuthority choiceAuthori return choiceAuthority instanceof LinkableEntityAuthority && entityType.equals(((LinkableEntityAuthority) choiceAuthority).getLinkedEntityType()); } + + @Override + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) { + if (this.vocabularyIndexMap.containsKey(nameVocab)) { + return this.vocabularyIndexMap.get(nameVocab); + } else { + init(); + ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab); + if (source != null && source instanceof DSpaceControlledVocabulary) { + Set metadataFields = new HashSet<>(); + Map> formsToFields = 
this.authoritiesFormDefinitions.get(nameVocab); + for (Map.Entry> formToField : formsToFields.entrySet()) { + metadataFields.addAll(formToField.getValue().stream().map(value -> + StringUtils.replace(value, "_", ".")) + .collect(Collectors.toList())); + } + DiscoverySearchFilterFacet matchingFacet = null; + for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) { + boolean coversAllFieldsFromVocab = true; + for (String fieldFromVocab: metadataFields) { + boolean coversFieldFromVocab = false; + for (String facetMdField: facetConfig.getMetadataFields()) { + if (facetMdField.startsWith(fieldFromVocab)) { + coversFieldFromVocab = true; + break; + } + } + if (!coversFieldFromVocab) { + coversAllFieldsFromVocab = false; + break; + } + } + if (coversAllFieldsFromVocab) { + matchingFacet = facetConfig; + break; + } + } + DSpaceControlledVocabularyIndex vocabularyIndex = + new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields, + matchingFacet); + this.vocabularyIndexMap.put(nameVocab, vocabularyIndex); + return vocabularyIndex; + } + return null; + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java new file mode 100644 index 000000000000..bf8194dbd53b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.util.Set; + +import org.dspace.browse.BrowseIndex; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; + +/** + * Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a + * {@code BrowseIndexRest} + * cached by {@link org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)} + * + * @author Marie Verdonck (Atmire) on 04/05/2023 + */ +public class DSpaceControlledVocabularyIndex extends BrowseIndex { + + protected DSpaceControlledVocabulary vocabulary; + protected Set metadataFields; + protected DiscoverySearchFilterFacet facetConfig; + + public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set metadataFields, + DiscoverySearchFilterFacet facetConfig) { + super(controlledVocabulary.vocabularyName); + this.vocabulary = controlledVocabulary; + this.metadataFields = metadataFields; + this.facetConfig = facetConfig; + } + + public DSpaceControlledVocabulary getVocabulary() { + return vocabulary; + } + + public Set getMetadataFields() { + return this.metadataFields; + } + + public DiscoverySearchFilterFacet getFacetConfig() { + return this.facetConfig; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java index 497fa08f2faf..123626cd0965 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java @@ -200,8 +200,8 @@ protected void addExternalResults(String text, ArrayList choices, List findByStatusAndCreationTimeOlderThan(Context context, List< return list(context, criteriaQuery, false, 
Process.class, -1, -1); } - } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index 6ce376908bf7..f06208e2d151 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -15,6 +15,7 @@ import java.util.UUID; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -534,4 +535,27 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu */ public String getEntityType(Collection collection); + /** + * Returns a list of all collections for a specific entity type. + * NOTE: for better performance, this method retrieves its results from an index (cache) + * and does not query the database directly. + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved." + * + * @param context DSpace Context + * @param entityType limit the returned collection to those related to given entity type + * @return list of collections found + * @throws SearchServiceException if search error + */ + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException; + + /** + * Returns total collection archived items + * + * @param collection Collection + * @return total collection archived items + * @throws ItemCountException + */ + int countArchivedItems(Collection collection) throws ItemCountException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java index e7b62126650c..c089bcec8df1 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java @@ -14,6 +14,7 @@ import java.util.UUID; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -292,4 +293,13 @@ public void removeSubcommunity(Context context, Community parentCommunity, Commu public List findAuthorizedGroupMapped(Context context, List actions) throws SQLException; int countTotal(Context context) throws SQLException; + + /** + * Returns total community archived items + * + * @param community Community + * @return total community archived items + * @throws ItemCountException + */ + int countArchivedItems(Community community) throws ItemCountException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index e6823690743d..9bd4752776db 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -508,6 +508,26 @@ public void inheritCollectionDefaultPolicies(Context context, Item item, Collect public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException; + /** + * Adjust the Bitstream policies to reflect what have been defined + * during the submission/workflow. 
The temporary SUBMISSION and WORKFLOW + * policies are removed and the policies defined at the item and collection + * level are copied and inherited as appropriate. Custom selected Item policies + * are copied to the bitstream only if no explicit custom policies were + * already applied to the bitstream. Collection's policies are inherited + * if there are no other policies defined or if the append mode is defined by + * the configuration via the core.authorization.installitem.inheritance-read.append-mode property + * + * @param context DSpace context object + * @param item Item to adjust policies on + * @param collection Collection + * @param bitstream Bitstream to adjust policies on + * @throws SQLException If database error + * @throws AuthorizeException If authorization error + */ + public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream) + throws SQLException, AuthorizeException; + /** * Adjust the Item's policies to reflect what have been defined during the diff --git a/dspace-api/src/main/java/org/dspace/core/Utils.java b/dspace-api/src/main/java/org/dspace/core/Utils.java index b9fff20c7674..ea9ed57eca04 100644 --- a/dspace-api/src/main/java/org/dspace/core/Utils.java +++ b/dspace-api/src/main/java/org/dspace/core/Utils.java @@ -16,8 +16,6 @@ import java.net.Inet4Address; import java.net.InetAddress; import java.net.MalformedURLException; -import java.net.URI; -import java.net.URISyntaxException; import java.net.URL; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; @@ -415,7 +413,9 @@ public static String[] tokenize(String metadata) { * @return metadata field key */ public static String standardize(String schema, String element, String qualifier, String separator) { - if (StringUtils.isBlank(qualifier)) { + if (StringUtils.isBlank(element)) { + return null; + } else if (StringUtils.isBlank(qualifier)) { return schema + separator + element; } else { return schema + separator + element + separator + qualifier; @@ -447,14 +447,14 @@ public static String getBaseUrl(String urlString) { */ public static String getHostName(String uriString) { try { - URI uri = new URI(uriString); - String hostname = uri.getHost(); + URL url = new URL(uriString); + String hostname = url.getHost(); // remove the "www." from hostname, if it exists if (hostname != null) { return hostname.startsWith("www.") ? hostname.substring(4) : hostname; } return null; - } catch (URISyntaxException e) { + } catch (MalformedURLException e) { return null; } } diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java index f7ab18c01e54..5891fa017cb0 100644 --- a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java +++ b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java @@ -207,9 +207,10 @@ public void init(Curator curator, String taskId) throws IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); try { - // disallow DTD parsing to ensure no XXE attacks can occur. 
+ // disallow DTD parsing to ensure no XXE attacks can occur // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + factory.setXIncludeAware(false); docBuilder = factory.newDocumentBuilder(); } catch (ParserConfigurationException pcE) { log.error("caught exception: " + pcE); diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java index fefb4eb768ea..2587e6b0251e 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java @@ -8,12 +8,15 @@ package org.dspace.curate; import java.sql.SQLException; +import java.util.List; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link Curation} script @@ -22,9 +25,6 @@ */ public class CurationScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,16 +38,37 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { } /** - * Only admin can run Curation script via the scripts and processes endpoints. - * @param context The relevant DSpace context - * @return True if currentUser is admin, otherwise false + * Only repository admins or admins of the target object can run Curation script via the scripts + * and processes endpoints. 
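The authorization rule documented above hinges on whether the command line is scoped to a single object via "-i". A hedged sketch of that scoping check, mirroring the stream idiom used in the implementation that follows (here `parameters` stands for the List of DSpaceCommandLineParameter handed to isAllowedToExecute; only getName() and getValue() from that class are relied on, as in the hunk below):

    boolean scopedToOneObject = parameters.stream()
            .map(DSpaceCommandLineParameter::getName)
            .anyMatch("-i"::equals);
    String handleOrUuid = parameters.stream()
            .filter(parameter -> "-i".equals(parameter.getName()))
            .map(DSpaceCommandLineParameter::getValue)
            .findFirst()
            .orElse(null);

Note that the implementation below can safely call findFirst().get() instead of orElse(null) because it only reaches that branch after noneMatch("-i"::equals) has already ruled out the empty case.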
+ * + * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise + * @return true if the currentUser is allowed to run the script with the specified parameters or + * at least in some case if the parameters are not yet known */ @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { try { - return authorizeService.isAdmin(context); + if (commandLineParameters == null) { + return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) + || authorizeService.isItemAdmin(context); + } else if (commandLineParameters.stream() + .map(DSpaceCommandLineParameter::getName) + .noneMatch("-i"::equals)) { + return authorizeService.isAdmin(context); + } else { + String dspaceObjectID = commandLineParameters.stream() + .filter(parameter -> "-i".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .findFirst() + .get(); + HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + DSpaceObject dso = handleService.resolveToObject(context, dspaceObjectID); + return authorizeService.isAdmin(context, dso); + } } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + throw new RuntimeException(e); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java index edc8b942139c..a4ed68dbc808 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java @@ -59,37 +59,18 @@ public void internalRun() throws Exception { * new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer"); */ - if (indexClientOptions == IndexClientOptions.REMOVE) { - handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index"); - indexer.unIndexContent(context, commandLine.getOptionValue("r")); - } else if (indexClientOptions == IndexClientOptions.CLEAN) { - handler.logInfo("Cleaning Index"); - indexer.cleanIndex(); - } else if (indexClientOptions == IndexClientOptions.DELETE) { - handler.logInfo("Deleting Index"); - indexer.deleteIndex(); - } else if (indexClientOptions == IndexClientOptions.BUILD || - indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { - handler.logInfo("(Re)building index from scratch."); - indexer.deleteIndex(); - indexer.createIndex(context); - if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { - checkRebuildSpellCheck(commandLine, indexer); - } - } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) { - handler.logInfo("Optimizing search core."); - indexer.optimize(); - } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) { - checkRebuildSpellCheck(commandLine, indexer); - } else if (indexClientOptions == IndexClientOptions.INDEX) { - final String param = commandLine.getOptionValue('i'); + Optional indexableObject = Optional.empty(); + + if (indexClientOptions == IndexClientOptions.REMOVE || indexClientOptions == IndexClientOptions.INDEX) { + final String param = indexClientOptions == IndexClientOptions.REMOVE ? 
commandLine.getOptionValue('r') : + commandLine.getOptionValue('i'); UUID uuid = null; try { uuid = UUID.fromString(param); } catch (Exception e) { - // nothing to do, it should be an handle + // nothing to do, it should be a handle } - Optional indexableObject = Optional.empty(); + if (uuid != null) { final Item item = ContentServiceFactory.getInstance().getItemService().find(context, uuid); if (item != null) { @@ -121,7 +102,32 @@ public void internalRun() throws Exception { if (!indexableObject.isPresent()) { throw new IllegalArgumentException("Cannot resolve " + param + " to a DSpace object"); } - handler.logInfo("Indexing " + param + " force " + commandLine.hasOption("f")); + } + + if (indexClientOptions == IndexClientOptions.REMOVE) { + handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index"); + indexer.unIndexContent(context, indexableObject.get().getUniqueIndexID()); + } else if (indexClientOptions == IndexClientOptions.CLEAN) { + handler.logInfo("Cleaning Index"); + indexer.cleanIndex(); + } else if (indexClientOptions == IndexClientOptions.DELETE) { + handler.logInfo("Deleting Index"); + indexer.deleteIndex(); + } else if (indexClientOptions == IndexClientOptions.BUILD || + indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { + handler.logInfo("(Re)building index from scratch."); + indexer.deleteIndex(); + indexer.createIndex(context); + if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { + checkRebuildSpellCheck(commandLine, indexer); + } + } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) { + handler.logInfo("Optimizing search core."); + indexer.optimize(); + } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) { + checkRebuildSpellCheck(commandLine, indexer); + } else if (indexClientOptions == IndexClientOptions.INDEX) { + handler.logInfo("Indexing " + commandLine.getOptionValue('i') + " force " + commandLine.hasOption("f")); final long startTimeMillis = System.currentTimeMillis(); final long count = indexAll(indexer, ContentServiceFactory.getInstance(). 
getItemService(), context, indexableObject.get()); @@ -185,7 +191,7 @@ private static long indexAll(final IndexingService indexingService, indexingService.indexContent(context, dso, true, true); count++; if (dso.getIndexedObject() instanceof Community) { - final Community community = (Community) dso; + final Community community = (Community) dso.getIndexedObject(); final String communityHandle = community.getHandle(); for (final Community subcommunity : community.getSubcommunities()) { count += indexAll(indexingService, itemService, context, new IndexableCommunity(subcommunity)); diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java index 8bf3cf2aba62..8707b733a637 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.discovery; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link IndexClient} script */ public class IndexDiscoveryScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -30,15 +22,6 @@ public Class getDspaceRunnableClass() { return dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java index 8dd02f5d44e0..aa90ccf4a371 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java @@ -107,6 +107,10 @@ static List findDirectlyAuthorizedGroupAndEPersonPrefixedIds( ArrayList prefixedIds = new ArrayList<>(); for (int auth : authorizations) { for (ResourcePolicy policy : authService.getPoliciesActionFilter(context, obj, auth)) { + // Avoid NPE in cases where the policy does not have group or eperson + if (policy.getGroup() == null && policy.getEPerson() == null) { + continue; + } String prefixedId = policy.getGroup() == null ? 
"e" + policy.getEPerson().getID() : "g" + policy.getGroup().getID(); diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index d86116a9dcd6..32b69457de47 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -22,6 +22,9 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.utils.DiscoverQueryBuilder; @@ -77,35 +80,80 @@ public static void clearCachedSearchService() { searchService = null; } + /** + * Retrieves the Discovery Configuration for a null context, prefix and DSpace object. + * This will result in returning the default configuration + * @return the default configuration + */ public static DiscoveryConfiguration getDiscoveryConfiguration() { - return getDiscoveryConfiguration(null, null); + return getDiscoveryConfiguration(null, null, null); } - public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) { - return getDiscoveryConfiguration(null, dso); + /** + * Retrieves the Discovery Configuration with a null prefix for a DSpace object. + * @param context + * the dabase context + * @param dso + * the DSpace object + * @return the Discovery Configuration for the specified DSpace object + */ + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) { + return getDiscoveryConfiguration(context, null, dso); } /** * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * null prefix mean the normal query, other predefined values are workspace or workflow - * + * + * + * @param context + * the database context * @param prefix * the namespace of the configuration to lookup if any * @param dso * the DSpaceObject * @return the discovery configuration for the specified scope */ - public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) { + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix, + DSpaceObject dso) { if (prefix != null) { return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); } else { - return getDiscoveryConfigurationByName(dso != null ? 
dso.getHandle() : null); + return getDiscoveryConfigurationByDSO(context, dso); } } + /** + * Retrieve the configuration for the current dspace object and all its parents and add it to the provided set + * @param context - The database context + * @param configurations - The set of configurations to add the retrieved configurations to + * @param prefix - The namespace of the configuration to lookup if any + * @param dso - The DSpace Object + * @return the set of configurations with additional retrieved ones for the dspace object and parents + * @throws SQLException + */ + public static Set addDiscoveryConfigurationForParents( + Context context, Set configurations, String prefix, DSpaceObject dso) + throws SQLException { + if (dso == null) { + configurations.add(getDiscoveryConfigurationByName(null)); + return configurations; + } + if (prefix != null) { + configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle())); + } else { + configurations.add(getDiscoveryConfigurationByName(dso.getHandle())); + } + + DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance() + .getDSpaceObjectService(dso); + DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso); + return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject); + } + /** * Return the discovery configuration identified by the specified name - * + * * @param configurationName the configuration name assigned to the bean in the * discovery.xml * @return the discovery configuration @@ -117,6 +165,18 @@ public static DiscoveryConfiguration getDiscoveryConfigurationByName( return configurationService.getDiscoveryConfigurationByNameOrDefault(configurationName); } + /** + * Return the discovery configuration for the provided DSO + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO + */ + public static DiscoveryConfiguration getDiscoveryConfigurationByDSO( + Context context, DSpaceObject dso) { + DiscoveryConfigurationService configurationService = getConfigurationService(); + return configurationService.getDiscoveryConfiguration((IndexableObject) dso); + } + public static DiscoveryConfigurationService getConfigurationService() { ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); return manager @@ -131,47 +191,55 @@ public static List getIgnoredMetadataFields(int type) { * Method that retrieves a list of all the configuration objects from the given item * A configuration object can be returned for each parent community/collection * + * @param context the database context * @param item the DSpace item * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. 
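The parent-walking helper added above terminates when the parent chain reaches null, at which point the "default" configuration is added; every intermediate level contributes the configuration registered under its handle, optionally namespaced by the prefix. A hedged usage sketch, assuming `context` and `collection` are available in the caller, that java.util.Set/HashSet are imported, and that the surrounding method declares the SQLException thrown by the helper:

    Set<DiscoveryConfiguration> configurations = new HashSet<>();
    SearchUtils.addDiscoveryConfigurationForParents(context, configurations, null, collection);
    // one entry per level: the collection, its community ancestors, then "default"
    System.out.println("Applicable configurations: " + configurations.size());

Because the result is a Set, levels that resolve to the same named configuration (typically the "default" fallback) are only counted once.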
*/ - public static List getAllDiscoveryConfigurations(Item item) throws SQLException { + public static List getAllDiscoveryConfigurations(Context context, Item item) + throws SQLException { List collections = item.getCollections(); - return getAllDiscoveryConfigurations(null, collections, item); + return getAllDiscoveryConfigurations(context, null, collections, item); } /** * Return all the discovery configuration applicable to the provided workspace item + * + * @param context * @param witem a workspace item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkspaceItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workspace", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem()); } /** * Return all the discovery configuration applicable to the provided workflow item + * + * @param context * @param witem a workflow item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkflowItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workflow", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem()); } - private static List getAllDiscoveryConfigurations(String prefix, + private static List getAllDiscoveryConfigurations(final Context context, + String prefix, List collections, Item item) throws SQLException { Set result = new HashSet<>(); for (Collection collection : collections) { - DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection); - result.add(configuration); + addDiscoveryConfigurationForParents(context, result, prefix, collection); } //Add alwaysIndex configurations diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java index bcc169a6df9e..c5f194a076e6 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java @@ -272,7 +272,12 @@ public void unIndexContent(Context context, String searchUniqueID, boolean commi try { if (solrSearchCore.getSolr() != null) { - indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID).delete(searchUniqueID); + IndexFactory index = indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID); + if (index != null) { + index.delete(searchUniqueID); + } else { + log.warn("Object not found in Solr index: " + searchUniqueID); + } if (commit) { solrSearchCore.getSolr().commit(); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceValuePairsIndexPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceValuePairsIndexPlugin.java index 213d6547d958..606044bea613 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceValuePairsIndexPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceValuePairsIndexPlugin.java @@ -36,6 +36,7 @@ import 
org.dspace.discovery.configuration.MultiLanguageDiscoverSearchFilterFacet; import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.services.ConfigurationService; +import org.dspace.web.ContextUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -160,7 +161,7 @@ private List findSearchFiltersByMetadataField(Item item, private List getAllDiscoveryConfiguration(Item item) { try { - return SearchUtils.getAllDiscoveryConfigurations(item); + return SearchUtils.getAllDiscoveryConfigurations(ContextUtil.obtainCurrentRequestContext(), item); } catch (SQLException e) { throw new SQLRuntimeException(e); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 8e9c1a77aeb5..557b7a5a59fb 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -13,6 +13,9 @@ import java.util.Map; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.services.factory.DSpaceServicesFactory; @@ -22,6 +25,8 @@ */ public class DiscoveryConfigurationService { + private static final Logger log = LogManager.getLogger(); + private Map map; private Map> toIgnoreMetadataFields = new HashMap<>(); @@ -51,14 +56,26 @@ public void setToIgnoreMetadataFields(Map> toIgnoreMetadat this.toIgnoreMetadataFields = toIgnoreMetadataFields; } - public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) { + /** + * Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from + * the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will + * be done to look for the unique index ID of the IndexableObject. When the IndexableObject is null, the default + * configuration will be retrieved + * + * When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param indexableObject - The IndexableObject to retrieve the configuration for + * @return the discovery configuration for the provided IndexableObject. 
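In short, the lookup documented above resolves a configuration name in this order: the wrapped object's handle for an IndexableDSpaceObject, the unique index ID for any other indexable type, and "default" when the argument is null, with unknown names falling back to the default via getDiscoveryConfigurationByNameOrDefault. A hedged usage sketch; the variable `indexableCollection` is illustrative and assumed to wrap an existing collection:

    DiscoveryConfigurationService configurationService = SearchUtils.getConfigurationService();
    // resolved by the collection's handle, falling back to "default" if nothing is registered
    DiscoveryConfiguration scoped = configurationService.getDiscoveryConfiguration(indexableCollection);
    // null short-circuits straight to the "default" configuration
    DiscoveryConfiguration fallback = configurationService.getDiscoveryConfiguration(null);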
+ */ + public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject indexableObject) { String name; - if (dso == null) { + if (indexableObject == null) { name = "default"; - } else if (dso instanceof IndexableDSpaceObject) { - name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle(); + } else if (indexableObject instanceof IndexableDSpaceObject) { + name = ((IndexableDSpaceObject) indexableObject).getIndexedObject().getHandle(); } else { - name = dso.getUniqueIndexID(); + name = indexableObject.getUniqueIndexID(); } return getDiscoveryConfigurationByNameOrDefault(name); @@ -105,6 +122,18 @@ public List getIndexAlwaysConfigurations() { return configs; } + /** + * @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet} + */ + public List getAllFacetsConfig() { + List configs = new ArrayList<>(); + for (String key : map.keySet()) { + DiscoveryConfiguration config = map.get(key); + configs.addAll(config.getSidebarFacets()); + } + return configs; + } + public static void main(String[] args) { System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java index 3a730b34dd36..a66cb9fc34a5 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -26,6 +27,11 @@ public class DiscoverySortConfiguration { private List sortFields = new ArrayList(); + /** + * Default sort configuration to use when needed + */ + @Nullable private DiscoverySortFieldConfiguration defaultSortField; + public List getSortFields() { return sortFields; } @@ -34,6 +40,11 @@ public void setSortFields(List sortFields) { this.sortFields = sortFields; } + + public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) { + this.defaultSortField = configuration; + } + public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) { if (StringUtils.isBlank(sortField)) { return null; diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java index cc867bbf21e0..c4258efdeea5 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java @@ -86,7 +86,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCollection inde final Collection collection = indexableCollection.getIndexedObject(); // Retrieve configuration - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(collection); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, collection); DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration .getHitHighlightingConfiguration(); List highlightedMetadataFields 
= new ArrayList<>(); @@ -173,4 +173,4 @@ public List getCollectionLocations(Context context, Collection collectio return locations; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java index 8521b7dda0de..e92819601839 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java @@ -69,7 +69,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCommunity index final Community community = indexableObject.getIndexedObject(); // Retrieve configuration - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(community); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, community); DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration .getHitHighlightingConfiguration(); List highlightedMetadataFields = new ArrayList<>(); @@ -135,4 +135,4 @@ public List getLocations(Context context, IndexableCommunity indexableDS return locations; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java index 8a24b997ffae..f24e9875f006 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java @@ -80,11 +80,13 @@ public void storeInprogressItemFields(Context context, SolrInputDocument doc, // Add item metadata List discoveryConfigurations; if (inProgressSubmission instanceof WorkflowItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkflowItem) inProgressSubmission); } else if (inProgressSubmission instanceof WorkspaceItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkspaceItem) inProgressSubmission); } else { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); } indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index 82247bb972d2..131c2833dd73 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -31,6 +31,7 @@ import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrInputDocument; +import org.dspace.authority.service.AuthorityValueService; import org.dspace.content.Collection; import org.dspace.content.Community; import 
org.dspace.content.Item; @@ -93,6 +94,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); //mandatory facet to show status on mydspace diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java index d98d4a154977..7b933780bfbc 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java @@ -68,6 +68,7 @@ public SolrInputDocument buildDocument(Context context, IndexableMetadataField i // add read permission on doc for anonymous group doc.addField("read", "g" + anonymousGroup.getID()); } + doc.addField(FIELD_NAME_VARIATIONS + "_sort", fieldName); return doc; } diff --git a/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java index ca067452ab6c..991efccc36c7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java +++ b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java @@ -384,7 +384,7 @@ private boolean isConfigured(String sortBy, DiscoverySortConfiguration searchSor private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) { if (Objects.nonNull(searchSortConfiguration.getSortFields()) && - !searchSortConfiguration.getSortFields().isEmpty()) { + !searchSortConfiguration.getSortFields().isEmpty()) { sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); } return sortOrder; diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index 9fda372b4f2a..ceb90abe9715 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -353,8 +353,6 @@ public Set allMemberGroupsSet(Context context, EPerson ePerson) throws SQ List groupCache = group2GroupCacheDAO.findByChildren(context, groups); // now we have all owning groups, also grab all parents of owning groups - // yes, I know this could have been done as one big query and a union, - // but doing the Oracle port taught me to keep to simple SQL! for (Group2GroupCache group2GroupCache : groupCache) { groups.add(group2GroupCache.getParent()); } diff --git a/dspace-api/src/main/java/org/dspace/event/Consumer.java b/dspace-api/src/main/java/org/dspace/event/Consumer.java index 1a8b16e98a0b..f56efcc7bacb 100644 --- a/dspace-api/src/main/java/org/dspace/event/Consumer.java +++ b/dspace-api/src/main/java/org/dspace/event/Consumer.java @@ -10,18 +10,16 @@ import org.dspace.core.Context; /** - * Interface for content event consumers. Note that the consumer cannot tell if - * it is invoked synchronously or asynchronously; the consumer interface and - * sequence of calls is the same for both. 
Asynchronous consumers may see more - * consume() calls between the start and end of the event stream, if they are - * invoked asynchronously, once in a long time period, rather than synchronously - * after every Context.commit(). - * - * @version $Revision$ + * Interface for content event consumers. Note that the consumer cannot tell + * if it is invoked synchronously or asynchronously; the consumer interface + * and sequence of calls is the same for both. Asynchronous consumers may see + * more consume() calls between the start and end of the event stream, if they + * are invoked asynchronously, once in a long time period, rather than + * synchronously after every Context.commit(). */ public interface Consumer { /** - * Initialize - allocate any resources required to operate. This may include + * Allocate any resources required to operate. This may include * initializing any pooled JMS resources. Called ONCE when created by the * dispatcher pool. This should be used to set up expensive resources that * will remain for the lifetime of the consumer. @@ -31,12 +29,17 @@ public interface Consumer { public void initialize() throws Exception; /** - * Consume an event; events may get filtered at the dispatcher level, hiding - * it from the consumer. This behavior is based on the dispatcher/consumer - * configuration. Should include logic to initialize any resources required - * for a batch of events. + * Consume an event. Events may be filtered by a dispatcher, hiding them + * from the consumer. This behavior is based on the dispatcher/consumer + * configuration. Should include logic to initialize any resources + * required for a batch of events. + * + *
<p>
    This method must not commit the context. Committing causes + * re-dispatch of the event queue, which can result in infinite recursion + * leading to memory exhaustion as seen in + * {@link https://github.com/DSpace/DSpace/pull/8756}. * - * @param ctx the execution context object + * @param ctx the current DSpace session * @param event the content event * @throws Exception if error */ diff --git a/dspace-api/src/main/java/org/dspace/event/package-info.java b/dspace-api/src/main/java/org/dspace/event/package-info.java new file mode 100644 index 000000000000..544dfb271a1d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/event/package-info.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +/** + * Actions which alter DSpace model objects can queue {@link Event}s, which + * are presented to {@link Consumer}s by a {@link Dispatcher}. A pool of + * {@code Dispatcher}s is managed by an {@link service.EventService}, guided + * by configuration properties {@code event.dispatcher.*}. + * + *
<p>
    One must be careful not to commit the current DSpace {@code Context} + * during event dispatch. {@code commit()} triggers event dispatching, and + * doing this during event dispatch can lead to infinite recursion and + * memory exhaustion. + */ + +package org.dspace.event; diff --git a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java index 8f48cda712bc..756b8654f285 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java @@ -12,6 +12,9 @@ import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; import org.xml.sax.SAXException; @@ -28,11 +31,16 @@ public abstract class Converter { protected Object unmarshall(InputStream input, Class type) throws SAXException, URISyntaxException { try { + XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory(); + // disallow DTD parsing to ensure no XXE attacks can occur + xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false); + XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(input); + JAXBContext context = JAXBContext.newInstance(type); Unmarshaller unmarshaller = context.createUnmarshaller(); - return unmarshaller.unmarshal(input); - } catch (JAXBException e) { - throw new RuntimeException("Unable to unmarshall orcid message" + e); + return unmarshaller.unmarshal(xmlStreamReader); + } catch (JAXBException | XMLStreamException e) { + throw new RuntimeException("Unable to unmarshall orcid message: " + e); } } } diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index e7c786d5f8ce..e5a90907c7b6 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -27,13 +27,14 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; /** * @author Marsa Haoua * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ -public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { +public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implements InitializingBean { /** * log4j category */ @@ -49,6 +50,19 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { @Autowired(required = true) protected VersionHistoryService versionHistoryService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedDOIIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public String mint(Context context, DSpaceObject dso) throws IdentifierException { return 
mint(context, dso, this.filter); @@ -66,7 +80,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) try { history = versionHistoryService.findByItem(context, item); } catch (SQLException ex) { - throw new RuntimeException("A problem occured while accessing the database.", ex); + throw new RuntimeException("A problem occurred while accessing the database.", ex); } String doi = null; @@ -76,7 +90,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) return doi; } } catch (SQLException ex) { - log.error("Error while attemping to retrieve information about a DOI for " + log.error("Error while attempting to retrieve information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso.getID() + ".", ex); throw new RuntimeException("Error while attempting to retrieve " @@ -134,7 +148,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) if (history != null) { // versioning is currently supported for items only // if we have a history, we have a item - doi = makeIdentifierBasedOnHistory(context, dso, history); + doi = makeIdentifierBasedOnHistory(context, dso, history, filter); } else { doi = loadOrCreateDOI(context, dso, null, filter).getDoi(); } @@ -145,7 +159,7 @@ public String mint(Context context, DSpaceObject dso, Filter filter) log.error("AuthorizationException while creating a new DOI: ", ex); throw new IdentifierException(ex); } - return doi; + return doi.startsWith(DOI.SCHEME) ? doi : DOI.SCHEME + doi; } @Override @@ -153,6 +167,21 @@ public void register(Context context, DSpaceObject dso, String identifier) throw register(context, dso, identifier, this.filter); } + @Override + public String register(Context context, DSpaceObject dso, Filter filter) + throws IdentifierException { + if (!(dso instanceof Item)) { + // DOIs are currently assigned only to Items + return null; + } + + String doi = mint(context, dso, filter); + + register(context, dso, doi, filter); + + return doi; + } + @Override public void register(Context context, DSpaceObject dso, String identifier, Filter filter) throws IdentifierException { @@ -162,7 +191,7 @@ public void register(Context context, DSpaceObject dso, String identifier, Filte Item item = (Item) dso; if (StringUtils.isEmpty(identifier)) { - identifier = mint(context, dso); + identifier = mint(context, dso, filter); } String doiIdentifier = doiService.formatIdentifier(identifier); @@ -170,10 +199,10 @@ public void register(Context context, DSpaceObject dso, String identifier, Filte // search DOI in our db try { - doi = loadOrCreateDOI(context, dso, doiIdentifier); + doi = loadOrCreateDOI(context, dso, doiIdentifier, filter); } catch (SQLException ex) { - log.error("Error in databse connection: " + ex.getMessage(), ex); - throw new RuntimeException("Error in database conncetion.", ex); + log.error("Error in database connection: " + ex.getMessage(), ex); + throw new RuntimeException("Error in database connection.", ex); } if (DELETED.equals(doi.getStatus()) || diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java index 6a7e347bd13f..94f24baee941 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java @@ -35,6 +35,7 @@ import org.dspace.versioning.VersionHistory; import 
org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -45,7 +46,7 @@ * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ @Component -public class VersionedHandleIdentifierProvider extends IdentifierProvider { +public class VersionedHandleIdentifierProvider extends IdentifierProvider implements InitializingBean { /** * log4j category */ @@ -71,6 +72,19 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider { @Autowired(required = true) protected ContentServiceFactory contentServiceFactory; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 7705fd2b5762..78ad6b7b79bb 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -30,6 +30,7 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -39,7 +40,8 @@ * @author Ben Bosman (ben at atmire dot com) */ @Component -public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider { +public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider + implements InitializingBean { /** * log4j category */ @@ -65,6 +67,19 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident @Autowired(required = true) private ItemService itemService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); @@ -306,6 +321,7 @@ public String mint(Context context, DSpaceObject dso) { public DSpaceObject resolve(Context context, String identifier, String... 
attributes) { // We can do nothing with this, return null try { + identifier = handleService.parseHandle(identifier); return handleService.resolveToObject(context, identifier); } catch (IllegalStateException | SQLException e) { log.error(LogHelper.getHeader(context, "Error while resolving handle to item", "handle: " + identifier), @@ -426,6 +442,19 @@ protected String makeIdentifierBasedOnHistory(Context context, DSpaceObject dso, } } + DSpaceObject itemWithCanonicalHandle = handleService.resolveToObject(context, canonical); + if (itemWithCanonicalHandle != null) { + if (itemWithCanonicalHandle.getID() != previous.getItem().getID()) { + log.warn("The previous version's item (" + previous.getItem().getID() + + ") does not match with the item containing handle " + canonical + + " (" + itemWithCanonicalHandle.getID() + ")"); + } + // Move the original handle from whatever item it's on to the newest version + handleService.modifyHandleDSpaceObject(context, canonical, dso); + } else { + handleService.createHandle(context, dso, canonical); + } + // add a new Identifier for this item: 12345/100.x String idNew = canonical + DOT + version.getVersionNumber(); //Make sure we don't have an old handle hanging around (if our previous version was deleted in the workspace) diff --git a/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java b/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java index 1961ce82744c..33ef058e1696 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java +++ b/dspace-api/src/main/java/org/dspace/identifier/doi/DOIConsumer.java @@ -141,7 +141,6 @@ public void consume(Context ctx, Event event) throws Exception { + item.getID() + " and DOI " + doi + ".", ex); } } - ctx.commit(); } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java index 53230d960831..4c5d8ab8255b 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java @@ -306,6 +306,8 @@ protected List search(String id, String appId) private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); return root.getChildren(); @@ -359,6 +361,8 @@ private List getCiniiIds(String appId, Integer maxResult, String author, String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); int url_len = this.url.length() - 1; SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); List namespaces = Arrays.asList( @@ -424,6 +428,8 @@ private Integer countCiniiElement(String appId, Integer maxResult, String author } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document 
document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); List namespaces = Arrays diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java new file mode 100644 index 000000000000..dec0b050f396 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; +import org.joda.time.LocalDate; + +/** + * This class is used for CrossRef's Live-Import to extract + * issued attribute. + * Beans are configured in the crossref-integration.xml file. + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class CrossRefDateMetadataProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator dates = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (dates.hasNext()) { + JsonNode date = dates.next(); + LocalDate issuedDate = null; + SimpleDateFormat issuedDateFormat = null; + if (date.has(0) && date.has(1) && date.has(2)) { + issuedDate = new LocalDate( + date.get(0).numberValue().intValue(), + date.get(1).numberValue().intValue(), + date.get(2).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + } else if (date.has(0) && date.has(1)) { + issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()) + .withMonthOfYear(date.get(1).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM"); + } else if (date.has(0)) { + issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy"); + } + values.add(issuedDateFormat.format(issuedDate.toDate())); + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java index 77e34ba9e41b..133717b90e79 100644 --- 
a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java @@ -397,6 +397,8 @@ private Integer countDocument(String bearer, String query) { } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); @@ -437,6 +439,8 @@ private List searchDocumentIds(String bearer, String query, int s } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); @@ -491,6 +495,8 @@ private List searchDocument(String bearer, String id, String docTy private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); List namespaces = Arrays.asList(Namespace.getNamespace("ns", "http://www.epo.org/exchange")); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java index 81a6631127ac..1a8a7a7861ed 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java @@ -60,7 +60,8 @@ public String executeHttpGetRequest(int timeout, String URL, Map headerParams = params.get(HEADER_PARAMETERS); @@ -71,7 +72,9 @@ public String executeHttpGetRequest(int timeout, String URL, Map contributeMetadata(T t) { LinkedList dayList = (LinkedList) day.contributeMetadata(t); for (int i = 0; i < yearList.size(); i++) { - DCDate dcDate = null; + String resultDateString = ""; String dateString = ""; + SimpleDateFormat resultFormatter = null; if (monthList.size() > i && dayList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + "-" + dayList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM-dd"); } else if (monthList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM"); } else { dateString = yearList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy"); } int j = 0; // Use the first dcDate that has been formatted (Config should go from most specific to most lenient) - while (j < dateFormatsToAttempt.size() && dcDate == null) { + while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) { String dateFormat = dateFormatsToAttempt.get(j); try { SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); Date date = formatter.parse(dateString); - dcDate = new DCDate(date); + resultDateString = resultFormatter.format(date); } catch (ParseException e) { // Multiple dateformats can be configured, we don't want to print the 
entire stacktrace every // time one of those formats fails. @@ -136,8 +140,8 @@ public Collection contributeMetadata(T t) { } j++; } - if (dcDate != null) { - values.add(metadataFieldMapping.toDCValue(field, dcDate.toString())); + if (StringUtils.isNotBlank(resultDateString)) { + values.add(metadataFieldMapping.toDCValue(field, resultDateString)); } else { log.info( "Failed parsing " + dateString + ", check " + diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java index b30ea22ca4e4..a6cfa625bbcf 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java @@ -292,7 +292,14 @@ public Collection call() throws Exception { int countAttempt = 0; while (StringUtils.isBlank(response) && countAttempt <= attempt) { countAttempt++; + + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response)) { @@ -316,7 +323,13 @@ public Collection call() throws Exception { countAttempt = 0; while (StringUtils.isBlank(response2) && countAttempt <= attempt) { countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response2)) { @@ -338,6 +351,11 @@ public Collection call() throws Exception { private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // Disallow external entities & entity expansion to protect against XXE attacks + // (NOTE: We receive errors if we disable all DTDs for PubMed, so this is the best we can do) + saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false); + saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + saxBuilder.setExpandEntities(false); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); @@ -418,7 +436,13 @@ public Collection call() throws Exception { int countAttempt = 0; while (StringUtils.isBlank(response) && countAttempt <= attempt) { countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response)) { @@ -441,7 +465,12 @@ public Collection call() throws Exception { countAttempt = 0; while (StringUtils.isBlank(response2) && countAttempt <= attempt) { countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + lastRequest = System.currentTimeMillis(); } if (StringUtils.isBlank(response2)) { @@ -501,4 +530,4 @@ public void 
setUrlSearch(String urlSearch) { this.urlSearch = urlSearch; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java index ba95e1e9045a..217fe18f1cc5 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java @@ -297,6 +297,8 @@ public Integer count(String query) throws URISyntaxException, ClientProtocolExce } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); Element element = root.getChild("hitCount"); @@ -368,6 +370,8 @@ public List search(String query, Integer size, Integer start) thro String cursorMark = StringUtils.EMPTY; if (StringUtils.isNotBlank(response)) { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); XPathFactory xpfac = XPathFactory.instance(); XPathExpression xPath = xpfac.compile("//responseWrapper/resultList/result", diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java index 36cf8ae2dc49..273a3455a78c 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java @@ -205,6 +205,8 @@ public Integer call() throws Exception { } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); @@ -389,6 +391,8 @@ private Map getRequestParameters(String query, String viewMode, private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); List records = root.getChildren("entry",Namespace.getNamespace("http://www.w3.org/2005/Atom")); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java index 38632a1a2b72..29801433e3b3 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java @@ -183,6 +183,7 @@ protected T retry(Callable callable) 
throws MetadataSourceException { log.warn("Error in trying operation " + operationId + " " + retry + " " + warning + ", retrying !", e); } finally { + this.lastRequest = System.currentTimeMillis(); lock.unlock(); } @@ -262,5 +263,7 @@ protected void throwSourceExceptionHook() { */ public abstract void init() throws Exception; - + public void setInterRequestTime(final long interRequestTime) { + this.interRequestTime = interRequestTime; + } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java index b669cd860078..a2e8e221f894 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java @@ -150,6 +150,8 @@ public Integer call() throws Exception { } SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(response)); Element root = document.getRootElement(); XPathExpression xpath = XPathFactory.instance().compile("//*[@name=\"RecordsFound\"]", @@ -294,6 +296,8 @@ private boolean isIsi(String query) { private List splitToRecords(String recordsSrc) { try { SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); Document document = saxBuilder.build(new StringReader(recordsSrc)); Element root = document.getRootElement(); String cData = XPathFactory.instance().compile("//*[@name=\"Records\"]", diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java index 33edea112e76..07a79384c77c 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java @@ -79,6 +79,8 @@ public class OrcidHistory implements ReloadableEntity { /** * A description of the synchronized resource. */ + @Lob + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -87,7 +89,7 @@ public class OrcidHistory implements ReloadableEntity { * the owner itself. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "metadata") private String metadata; @@ -102,7 +104,7 @@ public class OrcidHistory implements ReloadableEntity { * The response message incoming from ORCID. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "response_message") private String responseMessage; diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java index 4794e89008c3..65b66cd20c3e 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java @@ -64,6 +64,8 @@ public class OrcidQueue implements ReloadableEntity { /** * A description of the resource to be synchronized. 
*/ + @Lob + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -87,7 +89,7 @@ public class OrcidQueue implements ReloadableEntity { */ @Lob @Column(name = "metadata") - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") private String metadata; /** diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java index 1a657343c017..88a1033eca5f 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.orcid.script; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Script configuration for {@link OrcidBulkPush}. @@ -24,20 +19,8 @@ */ public class OrcidBulkPushScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; diff --git a/dspace-api/src/main/java/org/dspace/scripts/Process.java b/dspace-api/src/main/java/org/dspace/scripts/Process.java index 6c521e2133db..049b7845da50 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/Process.java +++ b/dspace-api/src/main/java/org/dspace/scripts/Process.java @@ -21,6 +21,7 @@ import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; +import javax.persistence.Lob; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; @@ -35,6 +36,7 @@ import org.dspace.core.ReloadableEntity; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.hibernate.annotations.Type; /** * This class is the DB Entity representation of the Process object to be stored in the Database @@ -68,6 +70,8 @@ public class Process implements ReloadableEntity { @Enumerated(EnumType.STRING) private ProcessStatus processStatus; + @Lob + @Type(type = "org.hibernate.type.TextType") @Column(name = "parameters") private String parameters; diff --git a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java index c8a7812a5159..abb700cb10c9 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java @@ -37,7 +37,7 @@ public ScriptConfiguration getScriptConfiguration(String name) { @Override public List getScriptConfigurations(Context context) { return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter( - scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context)) + scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context, null)) 
.sorted(Comparator.comparing(ScriptConfiguration::getName)) .collect(Collectors.toList()); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 4b15c22f444a..2da8658c8392 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -7,17 +7,28 @@ */ package org.dspace.scripts.configuration; +import java.sql.SQLException; +import java.util.List; + import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.DSpaceRunnable; import org.springframework.beans.factory.BeanNameAware; +import org.springframework.beans.factory.annotation.Autowired; /** * This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this - * and represent a script's configuration + * and represent a script's configuration. + * By default script are available only to repository administrators script that have a broader audience + * must override the {@link #isAllowedToExecute(Context, List)} method. */ public abstract class ScriptConfiguration implements BeanNameAware { + @Autowired + protected AuthorizeService authorizeService; + /** * The possible options for this script */ @@ -70,14 +81,27 @@ public void setName(String name) { * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration */ public abstract void setDspaceRunnableClass(Class dspaceRunnableClass); + /** * This method will return if the script is allowed to execute in the given context. 
This is by default set * to the currentUser in the context being an admin, however this can be overwritten by each script individually * if different rules apply * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise * @return A boolean indicating whether the script is allowed to execute or not */ - public abstract boolean isAllowedToExecute(Context context); + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + public boolean isAllowedToExecute(Context context) { + return this.isAllowedToExecute(context, List.of()); + } /** * The getter for the options of the Script diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index 0ae1311e697f..80e85acce6d2 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -20,9 +20,12 @@ import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; +import java.net.URI; import java.net.URLEncoder; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.nio.file.Paths; import java.sql.SQLException; import java.text.DateFormat; import java.text.ParseException; @@ -180,6 +183,19 @@ protected SolrLoggerServiceImpl() { @Override public void afterPropertiesSet() throws Exception { + statisticsCoreURL = configurationService.getProperty("solr-statistics.server"); + + if (null != statisticsCoreURL) { + Path statisticsPath = Paths.get(new URI(statisticsCoreURL).getPath()); + statisticsCoreBase = statisticsPath + .getName(statisticsPath.getNameCount() - 1) + .toString(); + } else { + log.warn("Unable to find solr-statistics.server parameter in DSpace configuration. 
This is required for " + + "sharding statistics."); + statisticsCoreBase = null; + } + solr = solrStatisticsCore.getSolr(); // Read in the file so we don't have to do it all the time @@ -208,6 +224,13 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPer @Override public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPerson currentUser, Date time) { + postView(dspaceObject, request, currentUser, null, time); + } + + @Override + public void postView(DSpaceObject dspaceObject, HttpServletRequest request, + EPerson currentUser, String referrer, Date time) { + if (solr == null) { return; @@ -216,7 +239,7 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, try { - SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, request, currentUser, time); + SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, request, currentUser, referrer, time); if (doc1 == null) { return; } @@ -250,6 +273,12 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, @Override public void postView(DSpaceObject dspaceObject, String ip, String userAgent, String xforwardedfor, EPerson currentUser) { + postView(dspaceObject, ip, userAgent, xforwardedfor, currentUser, null); + } + + @Override + public void postView(DSpaceObject dspaceObject, + String ip, String userAgent, String xforwardedfor, EPerson currentUser, String referrer) { if (solr == null) { return; } @@ -257,7 +286,7 @@ public void postView(DSpaceObject dspaceObject, try { SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, ip, userAgent, xforwardedfor, - currentUser); + currentUser, referrer); if (doc1 == null) { return; } @@ -295,7 +324,7 @@ public void postLogin(DSpaceObject dspaceObject, HttpServletRequest request, EPe try { - SolrInputDocument document = getCommonSolrDoc(dspaceObject, request, currentUser, new Date()); + SolrInputDocument document = getCommonSolrDoc(dspaceObject, request, currentUser, null, new Date()); if (document == null) { return; @@ -331,7 +360,23 @@ public void postLogin(DSpaceObject dspaceObject, HttpServletRequest request, EPe * @throws SQLException in case of a database exception */ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServletRequest request, - EPerson currentUser, Date time) throws SQLException { + EPerson currentUser) throws SQLException { + return getCommonSolrDoc(dspaceObject, request, currentUser, null, null); + } + + /** + * Returns a solr input document containing common information about the statistics + * regardless if we are logging a search or a view of a DSpace object + * + * @param dspaceObject the object used. + * @param request the current request context. + * @param currentUser the current session's user. + * @param referrer the optional referrer. 
+ * @return a solr input document + * @throws SQLException in case of a database exception + */ + protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServletRequest request, + EPerson currentUser, String referrer, Date time) throws SQLException { boolean isSpiderBot = request != null && SpiderDetector.isSpider(request); if (isSpiderBot && !configurationService.getBooleanProperty("usage-statistics.logBots", true)) { @@ -354,7 +399,9 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServ } //Also store the referrer - if (request.getHeader("referer") != null) { + if (referrer != null) { + doc1.addField("referrer", referrer); + } else if (request.getHeader("referer") != null) { doc1.addField("referrer", request.getHeader("referer")); } @@ -423,7 +470,8 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServ } protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, String ip, String userAgent, - String xforwardedfor, EPerson currentUser) throws SQLException { + String xforwardedfor, EPerson currentUser, + String referrer) throws SQLException { boolean isSpiderBot = SpiderDetector.isSpider(ip); if (isSpiderBot && !configurationService.getBooleanProperty("usage-statistics.logBots", true)) { @@ -444,6 +492,11 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, String i doc1.addField("ip", ip); } + // Add the referrer, if present + if (referrer != null) { + doc1.addField("referrer", referrer); + } + InetAddress ipAddress = null; try { String dns; @@ -513,7 +566,7 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, String i public void postSearch(DSpaceObject resultObject, HttpServletRequest request, EPerson currentUser, List queries, int rpp, String sortBy, String order, int page, DSpaceObject scope) { try { - SolrInputDocument solrDoc = getCommonSolrDoc(resultObject, request, currentUser, new Date()); + SolrInputDocument solrDoc = getCommonSolrDoc(resultObject, request, currentUser, null, new Date()); if (solrDoc == null) { return; } @@ -563,7 +616,7 @@ public void postSearch(DSpaceObject resultObject, HttpServletRequest request, EP public void postWorkflow(UsageWorkflowEvent usageWorkflowEvent) throws SQLException { initSolrYearCores(); try { - SolrInputDocument solrDoc = getCommonSolrDoc(usageWorkflowEvent.getObject(), null, null, new Date()); + SolrInputDocument solrDoc = getCommonSolrDoc(usageWorkflowEvent.getObject(), null, null, null, new Date()); //Log the current collection & the scope ! 
solrDoc.addField("owningColl", usageWorkflowEvent.getScope().getID().toString()); diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java index e56bca36ad5b..9825f3bfae36 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java @@ -8,6 +8,7 @@ package org.dspace.statistics; import java.util.ArrayList; +import java.util.Date; import java.util.List; import org.apache.logging.log4j.LogManager; @@ -50,10 +51,10 @@ public void receiveEvent(Event event) { if (UsageEvent.Action.VIEW == ue.getAction()) { if (ue.getRequest() != null) { - solrLoggerService.postView(ue.getObject(), ue.getRequest(), currentUser); + solrLoggerService.postView(ue.getObject(), ue.getRequest(), currentUser, ue.getReferrer(), new Date()); } else { solrLoggerService.postView(ue.getObject(), ue.getIp(), ue.getUserAgent(), ue.getXforwardedfor(), - currentUser); + currentUser, ue.getReferrer()); } } else if (UsageEvent.Action.SEARCH == ue.getAction()) { UsageSearchEvent usageSearchEvent = (UsageSearchEvent) ue; diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java index dcae4aa4cbcd..7d1015c8e2ba 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.statistics.export; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script @@ -21,9 +16,6 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -41,15 +33,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java index 8329e54308ed..d9d4f750a067 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java +++ b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java @@ -60,9 +60,15 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPerson currentUser, Date time); + void postView(DSpaceObject dspaceObject, HttpServletRequest request, + EPerson currentUser, String referrer, Date time); + public void 
postView(DSpaceObject dspaceObject, String ip, String userAgent, String xforwardedfor, EPerson currentUser); + void postView(DSpaceObject dspaceObject, + String ip, String userAgent, String xforwardedfor, EPerson currentUser, String referrer); + public void postLogin(DSpaceObject object, HttpServletRequest request, EPerson currentUser); public void postSearch(DSpaceObject resultObject, HttpServletRequest request, EPerson currentUser, diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java index 209c1e21e74d..5b367d7a8136 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java @@ -14,6 +14,8 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; @@ -153,22 +155,24 @@ protected boolean isLonger(String internalId, int endIndex) { * Retrieves a map of useful metadata about the File (size, checksum, modified) * * @param file The File to analyze - * @param attrs The map where we are storing values + * @param attrs The list of requested metadata values * @return Map of updated metadatas / attrs * @throws IOException */ - public Map about(File file, Map attrs) throws IOException { + public Map about(File file, List attrs) throws IOException { + + Map metadata = new HashMap(); + try { if (file != null && file.exists()) { - this.putValueIfExistsKey(attrs, SIZE_BYTES, file.length()); - if (attrs.containsKey(CHECKSUM)) { - attrs.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file))); - attrs.put(CHECKSUM_ALGORITHM, CSA); + this.putValueIfExistsKey(attrs, metadata, SIZE_BYTES, file.length()); + if (attrs.contains(CHECKSUM)) { + metadata.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file))); + metadata.put(CHECKSUM_ALGORITHM, CSA); } - this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(file.lastModified())); - return attrs; + this.putValueIfExistsKey(attrs, metadata, MODIFIED, String.valueOf(file.lastModified())); } - return null; + return metadata; } catch (Exception e) { log.error("about( FilePath: " + file.getAbsolutePath() + ", Map: " + attrs.toString() + ")", e); throw new IOException(e); @@ -204,13 +208,9 @@ private byte[] generateChecksumFrom(FileInputStream fis) throws IOException, NoS } } - protected void putValueIfExistsKey(Map attrs, String key, Object value) { - this.putEntryIfExistsKey(attrs, key, Map.entry(key, value)); - } - - protected void putEntryIfExistsKey(Map attrs, String key, Map.Entry entry) { - if (attrs.containsKey(key)) { - attrs.put(entry.getKey(), entry.getValue()); + protected void putValueIfExistsKey(List attrs, Map metadata, String key, Object value) { + if (attrs.contains(key)) { + metadata.put(key, value); } } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java index c91db9b1c3b2..2da5e84e8db0 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java @@ -9,6 +9,7 @@ import java.io.IOException; import java.io.InputStream; +import java.util.List; import java.util.Map; import org.dspace.content.Bitstream; @@ -62,13 +63,13 @@ 
public interface BitStoreService { * Obtain technical metadata about an asset in the asset store. * * @param bitstream The bitstream to describe - * @param attrs A Map whose keys consist of desired metadata fields + * @param attrs A List of desired metadata fields * @return attrs * A Map with key/value pairs of desired metadata * If file not found, then return null * @throws java.io.IOException If a problem occurs while obtaining metadata */ - public Map about(Bitstream bitstream, Map attrs) throws IOException; + public Map about(Bitstream bitstream, List attrs) throws IOException; /** * Remove an asset from the asset store. diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java index d6d625ebbdf6..fcebbe676b80 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java @@ -166,12 +166,9 @@ public UUID register(Context context, Bitstream bitstream, int assetstore, bitstream.setStoreNumber(assetstore); bitstreamService.update(context, bitstream); - Map wantedMetadata = new HashMap(); - wantedMetadata.put("size_bytes", null); - wantedMetadata.put("checksum", null); - wantedMetadata.put("checksum_algorithm", null); + List wantedMetadata = List.of("size_bytes", "checksum", "checksum_algorithm"); + Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata); - Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata); if (MapUtils.isEmpty(receivedMetadata)) { String message = "Not able to register bitstream:" + bitstream.getID() + " at path: " + bitstreamPath; log.error(message); @@ -201,13 +198,8 @@ public UUID register(Context context, Bitstream bitstream, int assetstore, } @Override - public Map computeChecksum(Context context, Bitstream bitstream) throws IOException { - Map wantedMetadata = new HashMap(); - wantedMetadata.put("checksum", null); - wantedMetadata.put("checksum_algorithm", null); - - Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); - return receivedMetadata; + public Map computeChecksum(Context context, Bitstream bitstream) throws IOException { + return this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("checksum", "checksum_algorithm")); } @Override @@ -252,10 +244,9 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio for (Bitstream bitstream : storage) { UUID bid = bitstream.getID(); - Map wantedMetadata = new HashMap(); - wantedMetadata.put("size_bytes", null); - wantedMetadata.put("modified", null); - Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); + List wantedMetadata = List.of("size_bytes", "modified"); + Map receivedMetadata = this.getStore(bitstream.getStoreNumber()) + .about(bitstream, wantedMetadata); // Make sure entries which do not exist are removed @@ -320,7 +311,10 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio context.uncacheEntity(bitstream); } - context.dispatchEvents(); + // Commit actual changes to DB after dispatch events + System.out.print("Performing incremental commit to the database..."); + context.commit(); + System.out.println(" Incremental commit done!"); cleanedBitstreamCount = cleanedBitstreamCount + storage.size(); @@ -350,13 +344,11 @@ public void 
cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio @Nullable @Override public Long getLastModified(Bitstream bitstream) throws IOException { - Map attrs = new HashMap(); - attrs.put("modified", null); - attrs = this.getStore(bitstream.getStoreNumber()).about(bitstream, attrs); - if (attrs == null || !attrs.containsKey("modified")) { + Map metadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("modified")); + if (metadata == null || !metadata.containsKey("modified")) { return null; } - return Long.valueOf(attrs.get("modified").toString()); + return Long.valueOf(metadata.get("modified").toString()); } /** diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java index 468d22ca738d..2fa9a9dbd5f6 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java @@ -15,6 +15,7 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.List; import java.util.Map; import org.apache.logging.log4j.Logger; @@ -126,13 +127,13 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { /** * Obtain technical metadata about an asset in the asset store. * - * @param bitstream The asset to describe - * @param attrs A Map whose keys consist of desired metadata fields - * @return attrs - * A Map with key/value pairs of desired metadata - * @throws java.io.IOException If a problem occurs while obtaining metadata + * @param bitstream The asset to describe + * @param attrs A List of desired metadata fields + * @return attrs A Map with key/value pairs of desired metadata + * @throws java.io.IOException If a problem occurs while obtaining + * metadata */ - public Map about(Bitstream bitstream, Map attrs) throws IOException { + public Map about(Bitstream bitstream, List attrs) throws IOException { try { // potentially expensive, since it may calculate the checksum File file = getFile(bitstream); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index d6056028c7f3..f79b84b30c1e 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -7,6 +7,8 @@ */ package org.dspace.storage.bitstore; +import static java.lang.String.valueOf; + import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -14,6 +16,8 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.UUID; import java.util.function.Supplier; @@ -26,7 +30,6 @@ import com.amazonaws.regions.Region; import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.GetObjectRequest; @@ -43,6 +46,7 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.IOUtils; +import org.apache.commons.io.output.NullOutputStream; import 
org.apache.commons.lang3.StringUtils; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; @@ -151,9 +155,8 @@ public S3BitStoreService() {} * * @param s3Service AmazonS3 service */ - protected S3BitStoreService(AmazonS3 s3Service, TransferManager tm) { + protected S3BitStoreService(AmazonS3 s3Service) { this.s3Service = s3Service; - this.tm = tm; } @Override @@ -213,7 +216,7 @@ public void init() throws IOException { } try { - if (!s3Service.doesBucketExist(bucketName)) { + if (!s3Service.doesBucketExistV2(bucketName)) { s3Service.createBucket(bucketName); log.info("Creating new S3 Bucket: " + bucketName); } @@ -311,7 +314,7 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { bitstream.setSizeBytes(scratchFile.length()); // we cannot use the S3 ETAG here as it could be not a MD5 in case of multipart upload (large files) or if // the bucket is encrypted - bitstream.setChecksum(Utils.toHex(md5Digest)); + bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest())); bitstream.setChecksumAlgorithm(CSA); } catch (AmazonClientException | IOException | InterruptedException e) { @@ -334,86 +337,56 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { * (Does not use getContentMD5, as that is 128-bit MD5 digest calculated on caller's side) * * @param bitstream The asset to describe - * @param attrs A Map whose keys consist of desired metadata fields + * @param attrs A List of desired metadata fields * @return attrs * A Map with key/value pairs of desired metadata * If file not found, then return null * @throws java.io.IOException If a problem occurs while obtaining metadata */ @Override - public Map about(Bitstream bitstream, Map attrs) throws IOException { + public Map about(Bitstream bitstream, List attrs) throws IOException { + String key = getFullKey(bitstream.getInternalId()); // If this is a registered bitstream, strip the -R prefix before retrieving if (isRegisteredBitstream(key)) { key = key.substring(REGISTERED_FLAG.length()); } + + Map metadata = new HashMap<>(); + try { + ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key); if (objectMetadata != null) { - if (attrs.containsKey("size_bytes")) { - attrs.put("size_bytes", objectMetadata.getContentLength()); - } - if (attrs.containsKey("modified")) { - attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime())); - } + putValueIfExistsKey(attrs, metadata, "size_bytes", objectMetadata.getContentLength()); + putValueIfExistsKey(attrs, metadata, "modified", valueOf(objectMetadata.getLastModified().getTime())); } - try ( - InputStream in = get(bitstream); - // Read through a digest input stream that will work out the MD5 - DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)); - ) { - in.close(); - byte[] md5Digest = dis.getMessageDigest().digest(); - String md5Base64 = Base64.encodeBase64String(md5Digest); - attrs.put("checksum", md5Base64); - attrs.put("checksum_algorithm", CSA); - } catch (NoSuchAlgorithmException nsae) { - // Should never happen - log.warn("Caught NoSuchAlgorithmException", nsae); + + putValueIfExistsKey(attrs, metadata, "checksum_algorithm", CSA); + + if (attrs.contains("checksum")) { + try (InputStream in = get(bitstream); + DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)) + ) { + Utils.copy(dis, NullOutputStream.NULL_OUTPUT_STREAM); + byte[] md5Digest = dis.getMessageDigest().digest(); + metadata.put("checksum", Utils.toHex(md5Digest)); 
+ } catch (NoSuchAlgorithmException nsae) { + // Should never happen + log.warn("Caught NoSuchAlgorithmException", nsae); + } } - return attrs; + + return metadata; } catch (AmazonS3Exception e) { if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) { - return null; + return metadata; } } catch (AmazonClientException e) { log.error("about(" + key + ", attrs)", e); throw new IOException(e); } - return null; - } - - private boolean isMD5Checksum(String eTag) { - // if the etag is NOT an MD5 it end with -x where x is the number of part used in the multipart upload - return StringUtils.contains(eTag, "-"); - } - - /** - * Populates map values by checking key existence - *
- * Adds technical metadata about an asset in the asset store, like:
- * size_bytes, checksum, checksum_algorithm, modified
    - * - * @param objectMetadata containing technical data - * @param attrs map with keys populated - * @return Map of enriched attrs with values - */ - public Map about(ObjectMetadata objectMetadata, Map attrs) { - if (objectMetadata != null) { - this.putValueIfExistsKey(attrs, SIZE_BYTES, objectMetadata.getContentLength()); - - // put CHECKSUM_ALGORITHM if exists CHECKSUM - this.putValueIfExistsKey(attrs, CHECKSUM, objectMetadata.getETag()); - this.putEntryIfExistsKey(attrs, CHECKSUM, Map.entry(CHECKSUM_ALGORITHM, CSA)); - - this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(objectMetadata.getLastModified().getTime())); - } - return attrs; + return metadata; } /** @@ -577,13 +550,14 @@ public static void main(String[] args) throws Exception { String accessKey = command.getOptionValue("a"); String secretKey = command.getOptionValue("s"); - String assetFile = command.getOptionValue("f"); S3BitStoreService store = new S3BitStoreService(); AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey); - store.s3Service = new AmazonS3Client(awsCredentials); + store.s3Service = AmazonS3ClientBuilder.standard() + .withCredentials(new AWSStaticCredentialsProvider(awsCredentials)) + .build(); //Todo configurable region Region usEast1 = Region.getRegion(Regions.US_EAST_1); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java index b979811be501..fd41b2486966 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java @@ -102,7 +102,7 @@ public interface BitstreamStorageService { public UUID register(Context context, Bitstream bitstream, int assetstore, String bitstreamPath) throws SQLException, IOException, AuthorizeException; - public Map computeChecksum(Context context, Bitstream bitstream) throws IOException; + public Map computeChecksum(Context context, Bitstream bitstream) throws IOException; /** * Does the internal_id column in the bitstream row indicate the bitstream diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java index ce5b0f8d0a4c..db7fd96c0cc7 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java @@ -413,11 +413,10 @@ private static void printDBInfo(Connection connection) throws SQLException { DatabaseMetaData meta = connection.getMetaData(); String dbType = getDbType(connection); System.out.println("\nDatabase Type: " + dbType); - if (dbType.equals(DBMS_ORACLE)) { - System.out.println("===================================="); - System.out.println("WARNING: Oracle support is deprecated!"); - System.out.println("See https://github.com/DSpace/DSpace/issues/8214"); - System.out.println("====================================="); + if (!dbType.equals(DBMS_POSTGRES) && !dbType.equals(DBMS_H2)) { + System.err.println("===================================="); + System.err.println("ERROR: Database type " + dbType + " is UNSUPPORTED!"); + System.err.println("====================================="); } System.out.println("Database URL: " + meta.getURL()); System.out.println("Database Schema: " + getSchemaName(connection)); @@ -552,10 +551,6 @@ private synchronized static FluentConfiguration 
setupFlyway(DataSource datasourc String dbType = getDbType(connection); connection.close(); - if (dbType.equals(DBMS_ORACLE)) { - log.warn("ORACLE SUPPORT IS DEPRECATED! See https://github.com/DSpace/DSpace/issues/8214"); - } - // Determine location(s) where Flyway will load all DB migrations ArrayList scriptLocations = new ArrayList<>(); @@ -791,26 +786,6 @@ private static synchronized void cleanDatabase(Flyway flyway, DataSource dataSou // First, run Flyway's clean command on database. // For MOST database types, this takes care of everything flyway.clean(); - - try (Connection connection = dataSource.getConnection()) { - // Get info about which database type we are using - String dbType = getDbType(connection); - - // If this is Oracle, the only way to entirely clean the database - // is to also purge the "Recyclebin". See: - // http://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_9018.htm - if (dbType.equals(DBMS_ORACLE)) { - PreparedStatement statement = null; - try { - statement = connection.prepareStatement("PURGE RECYCLEBIN"); - statement.executeQuery(); - } finally { - if (statement != null && !statement.isClosed()) { - statement.close(); - } - } - } - } } catch (FlywayException fe) { // If any FlywayException (Runtime) is thrown, change it to a SQLException throw new SQLException("Flyway clean error occurred", fe); @@ -1059,11 +1034,6 @@ public static boolean sequenceExists(Connection connection, String sequenceName) // We need to filter by schema in PostgreSQL schemaFilter = true; break; - case DBMS_ORACLE: - // Oracle specific query for a sequence owned by our current DSpace user - // NOTE: No need to filter by schema for Oracle, as Schema = User - sequenceSQL = "SELECT COUNT(1) FROM user_sequences WHERE sequence_name=?"; - break; case DBMS_H2: // In H2, sequences are listed in the "information_schema.sequences" table // SEE: http://www.h2database.com/html/grammar.html#information_schema @@ -1167,11 +1137,6 @@ public static String getSchemaName(Connection connection) // For PostgreSQL, the default schema is named "public" // See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html schema = "public"; - } else if (dbType.equals(DBMS_ORACLE)) { - // For Oracle, default schema is actually the user account - // See: http://stackoverflow.com/a/13341390 - DatabaseMetaData meta = connection.getMetaData(); - schema = meta.getUserName(); } else { // For H2 (in memory), there is no such thing as a schema schema = null; @@ -1369,8 +1334,6 @@ public static String getDbType(Connection connection) String dbms_lc = prodName.toLowerCase(Locale.ROOT); if (dbms_lc.contains("postgresql")) { return DBMS_POSTGRES; - } else if (dbms_lc.contains("oracle")) { - return DBMS_ORACLE; } else if (dbms_lc.contains("h2")) { // Used for unit testing only return DBMS_H2; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java index 842fc15e1657..f0c4e4e17990 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java @@ -78,13 +78,6 @@ protected static Integer dropDBConstraint(Connection connection, String tableNam constraintName += "_" + StringUtils.lowerCase(constraintSuffix); cascade = true; break; - case "oracle": - // In Oracle, constraints are listed in the USER_CONS_COLUMNS table - constraintNameSQL = "SELECT CONSTRAINT_NAME " + - "FROM 
USER_CONS_COLUMNS " + - "WHERE TABLE_NAME = ? AND COLUMN_NAME = ?"; - cascade = true; - break; case "h2": // In H2, column constraints are listed in the "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" table constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " + @@ -160,9 +153,6 @@ protected static Integer dropDBTable(Connection connection, String tableName) case "postgresql": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; - case "oracle": - dropTableSQL = "DROP TABLE " + tableName + " CASCADE CONSTRAINTS"; - break; case "h2": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; @@ -208,9 +198,6 @@ protected static Integer dropDBSequence(Connection connection, String sequenceNa case "postgresql": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; - case "oracle": - dropSequenceSQL = "DROP SEQUENCE " + sequenceName; - break; case "h2": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; @@ -256,9 +243,6 @@ protected static Integer dropDBView(Connection connection, String viewName) case "postgresql": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; - case "oracle": - dropViewSQL = "DROP VIEW " + viewName + " CASCADE CONSTRAINTS"; - break; case "h2": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java index 56c5b474d9fc..758e745ddc86 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java @@ -19,10 +19,9 @@ * of the "community" table. This is necessary for the upgrade from 1.3 to 1.4 *

    * This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

    * NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java index 6d82055e530e..37100a17f926 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java @@ -19,10 +19,9 @@ * from 1.5 to 1.6 *

    * This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

    * NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java index ea72d99b6e29..8e2be91127c8 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java @@ -20,10 +20,9 @@ * this column must be renamed to "resource_id". *
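A rough sketch of the pattern these constraint-dropping migrations follow, assuming Flyway's BaseJavaMigration API and a class placed in the same package as MigrationUtils so the protected helper is visible; the class, table, and column names are illustrative, and the real migrations also report a checksum and pair with a SQL script that completes the change:

    package org.dspace.storage.rdbms.migration;

    import java.sql.Connection;

    import org.flywaydb.core.api.migration.BaseJavaMigration;
    import org.flywaydb.core.api.migration.Context;

    public class V0_0_0__Example_drop_constraint extends BaseJavaMigration {
        @Override
        public void migrate(Context context) throws Exception {
            Connection connection = context.getConnection();
            // Drop the constraint on example_table.example_id whose auto-generated
            // name differs between PostgreSQL and H2.
            MigrationUtils.dropDBConstraint(connection, "example_table", "example_id", "pkey");
        }
    }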

    * This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

    * NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java index b3306a9fc93c..0361e6805356 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java @@ -67,8 +67,6 @@ public void migrate(Context context) String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java index 9aa0f4877c39..4c1cf3365395 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java @@ -46,8 +46,6 @@ public void migrate(Context context) throws Exception { String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java index 41b15ddd7a5a..894d3491a181 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.submit.migration; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link SubmissionFormsMigration} script @@ -23,9 +18,6 @@ public class SubmissionFormsMigrationCliScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git 
a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java index af3574da699e..6d9f3198fe26 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java @@ -7,7 +7,12 @@ */ package org.dspace.submit.migration; +import java.util.List; + +import org.apache.commons.cli.Options; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; /** * Subclass of {@link SubmissionFormsMigrationCliScriptConfiguration} to be use in rest/scripts.xml configuration so @@ -15,10 +20,37 @@ * * @author Maria Verdonck (Atmire) on 05/01/2021 */ -public class SubmissionFormsMigrationScriptConfiguration extends SubmissionFormsMigrationCliScriptConfiguration { +public class SubmissionFormsMigrationScriptConfiguration + extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return this.dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location"); + options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location"); + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { // Script is not allowed to be executed from REST side return false; } diff --git a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java index dbbb7bbc5e4d..e5cd86f50458 100644 --- a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java +++ b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java @@ -11,6 +11,8 @@ import java.util.Date; import java.util.Objects; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; @@ -21,6 +23,7 @@ import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import org.springframework.beans.factory.annotation.Autowired; /** @@ -28,9 +31,8 @@ * set permission on a file. An option is defined by a name such as "open * access", "embargo", "restricted access", etc. and some optional attributes to * better clarify the constraints and input available to the user. For instance - * an embargo option could allow to set a start date not longer than 3 years, - * etc - * + * an embargo option could allow to set a start date not longer than 3 years. 
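A condensed sketch of how such a date limit can be enforced with the DateMathParser and TimeHelpers classes touched by this changeset; the method and variable names are illustrative, not the class's actual API:

    import java.text.ParseException;
    import java.util.Date;

    import org.dspace.util.DateMathParser;
    import org.dspace.util.TimeHelpers;

    // Reject a requested embargo end date that falls after "now + 3 years",
    // with the limit normalized to midnight UTC before the comparison.
    static void checkEndDate(Date requestedEndDate, String endDateLimit) throws ParseException {
        DateMathParser parser = new DateMathParser();
        Date latestAllowed = TimeHelpers.toMidnightUTC(parser.parseMath(endDateLimit)); // e.g. "+3YEARS"
        if (requestedEndDate != null && requestedEndDate.after(latestAllowed)) {
            throw new IllegalStateException("The end date exceeds the configured limit " + endDateLimit);
        }
    }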
+ * * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it) */ public class AccessConditionOption { @@ -44,9 +46,9 @@ public class AccessConditionOption { @Autowired private ResourcePolicyService resourcePolicyService; - DateMathParser dateMathParser = new DateMathParser(); + private static final Logger LOG = LogManager.getLogger(); - /** An unique name identifying the access contion option **/ + /** A unique name identifying the access condition option. **/ private String name; /** @@ -147,6 +149,9 @@ public void setEndDateLimit(String endDateLimit) { * startDate should be null. Otherwise startDate may not be null. * @param endDate end date of the resource policy. If {@link #getHasEndDate()} returns false, * endDate should be null. Otherwise endDate may not be null. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + * @throws ParseException passed through (indicates problem with a date). */ public void createResourcePolicy(Context context, DSpaceObject obj, String name, String description, Date startDate, Date endDate) @@ -160,7 +165,7 @@ public void createResourcePolicy(Context context, DSpaceObject obj, String name, /** * Validate ResourcePolicy and after update it - * + * * @param context DSpace context * @param resourcePolicy ResourcePolicy to update * @throws SQLException If database error @@ -175,17 +180,25 @@ public void updateResourcePolicy(Context context, ResourcePolicy resourcePolicy) } /** - * Validate the policy properties, throws exceptions if any is not valid - * - * @param context DSpace context - * @param name Name of the resource policy - * @param startDate Start date of the resource policy. If {@link #getHasStartDate()} - * returns false, startDate should be null. Otherwise startDate may not be null. - * @param endDate End date of the resource policy. If {@link #getHasEndDate()} - * returns false, endDate should be null. Otherwise endDate may not be null. + * Validate the policy properties, throws exceptions if any is not valid. + * + * @param context DSpace context. + * @param name Name of the resource policy. + * @param startDate Start date of the resource policy. If + * {@link #getHasStartDate()} returns false, startDate + * should be null. Otherwise startDate may not be null. + * @param endDate End date of the resource policy. If + * {@link #getHasEndDate()} returns false, endDate should + * be null. Otherwise endDate may not be null. + * @throws IllegalStateException if a date is required and absent, + * a date is not required and present, or a date exceeds its + * configured maximum. + * @throws ParseException passed through. 
*/ - private void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) - throws SQLException, AuthorizeException, ParseException { + public void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) + throws IllegalStateException, ParseException { + LOG.debug("Validate policy dates: name '{}', startDate {}, endDate {}", + name, startDate, endDate); if (getHasStartDate() && Objects.isNull(startDate)) { throw new IllegalStateException("The access condition " + getName() + " requires a start date."); } @@ -199,29 +212,33 @@ private void validateResourcePolicy(Context context, String name, Date startDate throw new IllegalStateException("The access condition " + getName() + " cannot contain an end date."); } + DateMathParser dateMathParser = new DateMathParser(); + Date latestStartDate = null; if (Objects.nonNull(getStartDateLimit())) { - latestStartDate = dateMathParser.parseMath(getStartDateLimit()); + latestStartDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getStartDateLimit())); } Date latestEndDate = null; if (Objects.nonNull(getEndDateLimit())) { - latestEndDate = dateMathParser.parseMath(getEndDateLimit()); + latestEndDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getEndDateLimit())); } + LOG.debug(" latestStartDate {}, latestEndDate {}", + latestStartDate, latestEndDate); // throw if startDate after latestStartDate if (Objects.nonNull(startDate) && Objects.nonNull(latestStartDate) && startDate.after(latestStartDate)) { throw new IllegalStateException(String.format( - "The start date of access condition %s should be earlier than %s from now.", - getName(), getStartDateLimit() + "The start date of access condition %s should be earlier than %s from now (%s).", + getName(), getStartDateLimit(), dateMathParser.getNow() )); } // throw if endDate after latestEndDate if (Objects.nonNull(endDate) && Objects.nonNull(latestEndDate) && endDate.after(latestEndDate)) { throw new IllegalStateException(String.format( - "The end date of access condition %s should be earlier than %s from now.", - getName(), getEndDateLimit() + "The end date of access condition %s should be earlier than %s from now (%s).", + getName(), getEndDateLimit(), dateMathParser.getNow() )); } } diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java index d4f76a555936..d9a297e1f3d4 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java @@ -8,15 +8,11 @@ package org.dspace.subscriptions; -import java.sql.SQLException; import java.util.Objects; import org.apache.commons.cli.Options; -import org.dspace.authorize.AuthorizeServiceImpl; -import org.dspace.core.Context; import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them @@ -26,18 +22,6 @@ public class SubscriptionEmailNotificationConfiguration dspaceRunnableClass; - @Autowired - private AuthorizeServiceImpl authorizeService; - - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - 
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (Objects.isNull(options)) { diff --git a/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java b/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java index ed137e9d6d8c..ec9a2b12641a 100644 --- a/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java +++ b/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java @@ -65,6 +65,8 @@ String text() { private Action action; + private String referrer; + private static String checkParams(Action action, HttpServletRequest request, Context context, DSpaceObject object) { StringBuilder eventName = new StringBuilder(); if (action == null) { @@ -187,6 +189,12 @@ public UsageEvent(Action action, String ip, String userAgent, String xforwardedf this.object = object; } + public UsageEvent(Action action, HttpServletRequest request, Context context, DSpaceObject object, + String referrer) { + this(action, request, context, object); + setReferrer(referrer); + } + public HttpServletRequest getRequest() { return request; @@ -240,4 +248,11 @@ public Action getAction() { return this.action; } + public String getReferrer() { + return referrer; + } + + public void setReferrer(String referrer) { + this.referrer = referrer; + } } diff --git a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java index 7c3e13a28e13..9ff252e8ce3f 100644 --- a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java +++ b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java @@ -26,12 +26,15 @@ import java.util.TimeZone; import java.util.regex.Pattern; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; /** - * This class (Apache license) is copied from Apache Solr and add some tweaks to resolve unneeded dependency: - * https://raw.githubusercontent.com/apache/lucene-solr/releases/lucene-solr/7.1.0/solr/core/src/java/org/apache/solr - * /util/DateMathParser.java + * This class (Apache license) is copied from Apache Solr, adding some tweaks to + * resolve an unneeded dependency. See + * the original. * + *

    * A Simple Utility class for parsing "math" like strings relating to Dates. * *
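A short usage sketch of the expression syntax this class accepts (uppercase units, relative offsets, and /UNIT rounding); the wrapper method is illustrative:

    import java.text.ParseException;
    import java.util.Date;

    import org.dspace.util.DateMathParser;

    static void demo() throws ParseException {
        DateMathParser parser = new DateMathParser();
        Date inSixMonths  = parser.parseMath("+6MONTHS");     // six months from "now"
        Date startOfToday = parser.parseMath("/DAY");         // "now" rounded down to midnight (UTC by default)
        Date nextMonth    = parser.parseMath("+1MONTH/MONTH"); // first instant of next month
    }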

    @@ -78,7 +81,7 @@ * "setNow" in the interim). The default value of 'now' is * the time at the moment the DateMathParser instance is * constructed, unless overridden by the {@link CommonParams#NOW NOW} - * request param. + * request parameter. *

    * *

    @@ -88,7 +91,7 @@ * cascades to rounding of HOUR, MIN, MONTH, YEAR as well. The default * TimeZone used is UTC unless overridden by the * {@link CommonParams#TZ TZ} - * request param. + * request parameter. *

    * *

    @@ -102,6 +105,8 @@ */ public class DateMathParser { + private static final Logger LOG = LogManager.getLogger(); + public static final TimeZone UTC = TimeZone.getTimeZone("UTC"); /** @@ -119,12 +124,12 @@ public class DateMathParser { /** * A mapping from (uppercased) String labels identifying time units, - * to the corresponding {@link ChronoUnit} enum (e.g. "YEARS") used to + * to the corresponding {@link ChronoUnit} value (e.g. "YEARS") used to * set/add/roll that unit of measurement. * *

    * A single logical unit of time might be represented by multiple labels - * for convenience (ie: DATE==DAYS, + * for convenience (i.e. DATE==DAYS, * MILLI==MILLIS) *

    * @@ -220,6 +225,7 @@ private static LocalDateTime round(LocalDateTime t, String unit) { * * @param now an optional fixed date to use as "NOW" * @param val the string to parse + * @return result of applying the parsed expression to "NOW". * @throws Exception */ public static Date parseMath(Date now, String val) throws Exception { @@ -308,6 +314,7 @@ public TimeZone getTimeZone() { /** * Defines this instance's concept of "now". * + * @param n new value of "now". * @see #getNow */ public void setNow(Date n) { @@ -316,12 +323,12 @@ public void setNow(Date n) { /** * Returns a clone of this instance's concept of "now" (never null). - * * If setNow was never called (or if null was specified) then this method * first defines 'now' as the value dictated by the SolrRequestInfo if it * exists -- otherwise it uses a new Date instance at the moment getNow() * is first called. * + * @return "now". * @see #setNow * @see SolrRequestInfo#getNOW */ @@ -334,9 +341,12 @@ public Date getNow() { } /** - * Parses a string of commands relative "now" are returns the resulting Date. + * Parses a date expression relative to "now". * - * @throws ParseException positions in ParseExceptions are token positions, not character positions. + * @param math a date expression such as "+24MONTHS". + * @return the result of applying the expression to the current time. + * @throws ParseException positions in ParseExceptions are token positions, + * not character positions. */ public Date parseMath(String math) throws ParseException { /* check for No-Op */ @@ -344,6 +354,8 @@ public Date parseMath(String math) throws ParseException { return getNow(); } + LOG.debug("parsing {}", math); + ZoneId zoneId = zone.toZoneId(); // localDateTime is a date and time local to the timezone specified LocalDateTime localDateTime = ZonedDateTime.ofInstant(getNow().toInstant(), zoneId).toLocalDateTime(); @@ -394,11 +406,44 @@ public Date parseMath(String math) throws ParseException { } } + LOG.debug("returning {}", localDateTime); return Date.from(ZonedDateTime.of(localDateTime, zoneId).toInstant()); } private static Pattern splitter = Pattern.compile("\\b|(?<=\\d)(?=\\D)"); + /** + * For manual testing. With one argument, test one-argument parseMath. + * With two (or more) arguments, test two-argument parseMath. + * + * @param argv date math expressions. + * @throws java.lang.Exception passed through. 
+ */ + public static void main(String[] argv) + throws Exception { + DateMathParser parser = new DateMathParser(); + try { + Date parsed; + + if (argv.length <= 0) { + System.err.println("Date math expression(s) expected."); + } + + if (argv.length > 0) { + parsed = parser.parseMath(argv[0]); + System.out.format("Applied %s to implicit current time: %s%n", + argv[0], parsed.toString()); + } + + if (argv.length > 1) { + parsed = DateMathParser.parseMath(new Date(), argv[1]); + System.out.format("Applied %s to explicit current time: %s%n", + argv[1], parsed.toString()); + } + } catch (ParseException ex) { + System.err.format("Oops: %s%n", ex.getMessage()); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java new file mode 100644 index 000000000000..a50baf910e77 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.lowerCase; + +import java.util.List; +import java.util.Optional; + +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.services.ConfigurationService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * Service class for generation of front-end urls. + */ +@Component +public class FrontendUrlService { + + private static final Logger log = LoggerFactory.getLogger(FrontendUrlService.class); + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private SearchService searchService; + + /** + * Generates front-end url for specified item. + * + * @param context context + * @param item item + * @return front-end url + */ + public String generateUrl(Context context, Item item) { + String uiURL = configurationService.getProperty("dspace.ui.url"); + return generateUrlWithSearchService(item, uiURL, context) + .orElseGet(() -> uiURL + "/items/" + item.getID()); + } + + /** + * Generates front-end url for specified bitstream. 
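A small usage sketch for the new service, assuming it is injected as a Spring bean; the base URL and entity type in the comments are illustrative:

    import org.dspace.content.Bitstream;
    import org.dspace.content.Item;
    import org.dspace.core.Context;
    import org.dspace.util.FrontendUrlService;
    import org.springframework.beans.factory.annotation.Autowired;

    public class LinkExample {

        @Autowired
        private FrontendUrlService frontendUrlService;

        public void logLinks(Context context, Item item, Bitstream bitstream) {
            // e.g. https://demo.dspace.org/entities/publication/<item-uuid> when Discovery
            // reports an entityType for the item, otherwise .../items/<item-uuid>
            String itemUrl = frontendUrlService.generateUrl(context, item);
            // e.g. https://demo.dspace.org/bitstreams/<bitstream-uuid>/download
            String bitstreamUrl = frontendUrlService.generateUrl(bitstream);
            System.out.println(itemUrl + " " + bitstreamUrl);
        }
    }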
+ * + * @param bitstream bitstream + * @return front-end url + */ + public String generateUrl(Bitstream bitstream) { + String uiURL = configurationService.getProperty("dspace.ui.url"); + return uiURL + "/bitstreams/" + bitstream.getID() + "/download"; + } + + private Optional generateUrlWithSearchService(Item item, String uiURLStem, Context context) { + DiscoverQuery entityQuery = new DiscoverQuery(); + entityQuery.setQuery("search.uniqueid:\"Item-" + item.getID() + "\" and entityType:*"); + entityQuery.addSearchField("entityType"); + + try { + DiscoverResult discoverResult = searchService.search(context, entityQuery); + if (isNotEmpty(discoverResult.getIndexableObjects())) { + List entityTypes = discoverResult.getSearchDocument(discoverResult.getIndexableObjects() + .get(0)).get(0).getSearchFieldValues("entityType"); + if (isNotEmpty(entityTypes) && isNotBlank(entityTypes.get(0))) { + return Optional.of(uiURLStem + "/entities/" + lowerCase(entityTypes.get(0)) + "/" + item.getID()); + } + } + } catch (SearchServiceException e) { + log.error("Failed getting entitytype through solr for item " + item.getID() + ": " + e.getMessage()); + } + return Optional.empty(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java b/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java new file mode 100644 index 000000000000..2b6f37beb2e1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.io.IOException; +import java.util.Date; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; + +/** + * This is a custom date deserializer for jackson that make use of our + * {@link MultiFormatDateParser} + * + * Dates are parsed as being in the UTC zone. 
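A minimal sketch of wiring the deserializer onto a REST model field through Jackson's standard annotation; the DTO and field name are hypothetical:

    import java.util.Date;

    import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
    import org.dspace.util.MultiFormatDateDeserializer;

    public class ExampleRest {

        // Accepts the formats known to MultiFormatDateParser (various ISO-style
        // date and date-time strings) and binds them as UTC dates.
        @JsonDeserialize(using = MultiFormatDateDeserializer.class)
        private Date issued;

        public Date getIssued() { return issued; }

        public void setIssued(Date issued) { this.issued = issued; }
    }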
+ * + */ +public class MultiFormatDateDeserializer extends StdDeserializer { + + public MultiFormatDateDeserializer() { + this(null); + } + + public MultiFormatDateDeserializer(Class vc) { + super(vc); + } + + @Override + public Date deserialize(JsonParser jsonparser, DeserializationContext context) + throws IOException, JsonProcessingException { + String date = jsonparser.getText(); + return MultiFormatDateParser.parse(date); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java index 7dcebcc09f52..9342cb8b39e8 100644 --- a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java +++ b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java @@ -447,7 +447,7 @@ private void run() throws SolrServerException, SQLException, IOException { runReport(); logTime(false); for (int processed = updateRecords(MIGQUERY); (processed != 0) - && (numProcessed < numRec); processed = updateRecords(MIGQUERY)) { + && (numProcessed <= numRec); processed = updateRecords(MIGQUERY)) { printTime(numProcessed, false); batchUpdateStats(); if (context.getCacheSize() > CACHE_LIMIT) { @@ -696,4 +696,4 @@ private UUID mapOwner(String owntype, int val) throws SQLException { return null; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java new file mode 100644 index 000000000000..87d354a7f6c7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * Various manipulations of dates and times. + * + * @author mwood + */ +public class TimeHelpers { + private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); + + /** + * Never instantiate this class. + */ + private TimeHelpers() {} + + /** + * Set a Date's time to midnight UTC. + * + * @param from some date-time. + * @return midnight UTC of the supplied date-time. 
+ */ + public static Date toMidnightUTC(Date from) { + GregorianCalendar calendar = new GregorianCalendar(UTC); + calendar.setTime(from); + calendar.set(GregorianCalendar.HOUR_OF_DAY, 0); + calendar.set(GregorianCalendar.MINUTE, 0); + calendar.set(GregorianCalendar.SECOND, 0); + calendar.set(GregorianCalendar.MILLISECOND, 0); + return calendar.getTime(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/validation/MetadataValidator.java b/dspace-api/src/main/java/org/dspace/validation/MetadataValidator.java index cd9c2dd3c2fb..d53b939ee44a 100644 --- a/dspace-api/src/main/java/org/dspace/validation/MetadataValidator.java +++ b/dspace-api/src/main/java/org/dspace/validation/MetadataValidator.java @@ -113,7 +113,10 @@ public List validate(Context context, InProgressSubmission o } } else { - fieldsName.add(input.getFieldName()); + String fieldName = input.getFieldName(); + if (fieldName != null) { + fieldsName.add(fieldName); + } } for (String fieldName : fieldsName) { diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index 2550f68de19f..6d5281d2291a 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -51,6 +51,7 @@ metadata.bitstream.iiif-virtual.bytes = File size metadata.bitstream.iiif-virtual.checksum = Checksum org.dspace.app.itemexport.no-result = The DSpaceObject that you specified has no items. +org.dspace.app.util.SyndicationFeed.no-description = No Description org.dspace.checker.ResultsLogger.bitstream-format = Bitstream format org.dspace.checker.ResultsLogger.bitstream-found = Bitstream found org.dspace.checker.ResultsLogger.bitstream-id = Bitstream ID @@ -121,3 +122,5 @@ org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eper org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long! 
+org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \ + the repository diff --git a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl index f32942a302a2..d9f6cd361434 100644 --- a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl +++ b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl @@ -8,7 +8,7 @@ http://www.dspace.org/license/ --> - @@ -47,4 +47,4 @@ - \ No newline at end of file + diff --git a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl index 84c62158fe75..d9a9745a1b10 100644 --- a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl +++ b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl @@ -8,7 +8,7 @@ http://www.dspace.org/license/ --> - - \ No newline at end of file + diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql deleted file mode 100644 index 7907fccc00ae..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- Copyright 2010-2017 Boxfuse GmbH --- --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at --- --- http://www.apache.org/licenses/LICENSE-2.0 --- --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. 
--- ------------------ --- This is the Oracle upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql --- --- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------- - -DROP INDEX "${schema}"."${table}_vr_idx"; -DROP INDEX "${schema}"."${table}_ir_idx"; -ALTER TABLE "${schema}"."${table}" DROP COLUMN "version_rank"; -ALTER TABLE "${schema}"."${table}" DROP PRIMARY KEY DROP INDEX; -ALTER TABLE "${schema}"."${table}" MODIFY "version" NULL; -ALTER TABLE "${schema}"."${table}" ADD CONSTRAINT "${table}_pk" PRIMARY KEY ("installed_rank"); -UPDATE "${schema}"."${table}" SET "type"='BASELINE' WHERE "type"='INIT'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql index 7548fa4c6acb..edebe6e087fb 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql @@ -15,7 +15,7 @@ -- ----------------- -- This is the PostgreSQL upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql +-- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/postgresql/upgradeMetaDataTable.sql -- -- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md index 8088c6ccca62..87e114ca53a5 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md @@ -4,33 +4,25 @@ in Production. Instead, DSpace uses the H2 Database to perform Unit Testing during development. -By default, the DSpace Unit Testing environment configures H2 to run in -"Oracle Mode" and initializes the H2 database using the scripts in this directory. -These database migrations are automatically called by [Flyway](http://flywaydb.org/) -when the `DatabaseManager` initializes itself (see `initializeDatabase()` method). - -The H2 migrations in this directory are *based on* the Oracle Migrations, but -with some modifications in order to be valid in H2. - -## Oracle vs H2 script differences +By default, the DSpace Unit Testing environment configures H2 to run in memory +and initializes the H2 database using the scripts in this directory. See +`[src]/dspace-api/src/test/data/dspaceFolder/config/local.cfg`. -One of the primary differences between the Oracle scripts and these H2 ones -is in the syntax of the `ALTER TABLE` command. Unfortunately, H2's syntax for -that command differs greatly from Oracle (and PostgreSQL as well). +These database migrations are automatically called by [Flyway](http://flywaydb.org/) +in `DatabaseUtils`. -Most of the remainder of the scripts contain the exact Oracle syntax (which is -usually valid in H2). 
But, to you can always `diff` scripts of the same name
-for further syntax differences.
+The H2 migrations in this directory all use H2's grammar/syntax.
+For additional info see the [H2 SQL Grammar](https://www.h2database.com/html/grammar.html).
-For additional info see the [H2 SQL Grammar](http://www.h2database.com/html/grammar.html).
 ## More Information on Flyway
 The SQL scripts in this directory are H2-specific database migrations. They are
 used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/).
 As such, these scripts are automatically called by Flyway when the DSpace
-`DatabaseManager` initializes itself (see `initializeDatabase()` method). During
-that process, Flyway determines which version of DSpace your database is using
+`DatabaseUtils` initializes.
+
+During that process, Flyway determines which version of DSpace your database is using
 and then executes the appropriate upgrade script(s) to bring it up to the latest
 version.
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql
index c7cfdd84d551..833765ec3c4a 100644
--- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql
+++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql
@@ -5,7 +5,6 @@
 --
 -- http://www.dspace.org/license/
 --
-
 -------------------------------------------------------------------------------
 -- Sequences for Process within Group feature
 -------------------------------------------------------------------------------
@@ -14,4 +13,9 @@ CREATE TABLE Process2Group
 (
   process_id INTEGER REFERENCES Process(process_id),
   group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE
-);
\ No newline at end of file
+);
+-----------------------------------------------------------------------------------
+-- Drop the 'history_seq' sequence (related table deleted in DSpace 1.5)
+-----------------------------------------------------------------------------------
+
+DROP SEQUENCE history_seq;
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql
similarity index 77%
rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql
rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql
index 95d07be477d5..e4544e1de729 100644
--- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql
+++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql
@@ -7,7 +7,7 @@
 --
 -----------------------------------------------------------------------------------
--- Create columns copy_left and copy_right for RelationshipType
+-- Drop the 'history_seq' sequence (related table deleted in DSpace 1.5)
 -----------------------------------------------------------------------------------
-ALTER TABLE relationship_type ADD tilted INTEGER;
+DROP SEQUENCE IF EXISTS history_seq;
\ No newline at end of file
diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql similarity index 60% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql index 0db294c1c13a..8aec44a7f6f2 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql @@ -7,8 +7,11 @@ -- ----------------------------------------------------------------------------------- --- Create columns copy_left and copy_right for RelationshipType +-- Update short description for PNG mimetype in the bitstream format registry +-- See: https://github.com/DSpace/DSpace/pull/8722 ----------------------------------------------------------------------------------- -ALTER TABLE relationship_type ADD copy_to_left NUMBER(1) DEFAULT 0 NOT NULL; -ALTER TABLE relationship_type ADD copy_to_right NUMBER(1) DEFAULT 0 NOT NULL; +UPDATE bitstreamformatregistry +SET short_description='PNG' +WHERE short_description='image/png' + AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql new file mode 100644 index 000000000000..7641eb9fc2c0 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE orcid_history ALTER COLUMN description SET DATA TYPE CLOB; +ALTER TABLE orcid_queue ALTER COLUMN description SET DATA TYPE CLOB; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..1028ba370c47 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process ALTER COLUMN parameters SET DATA TYPE CLOB; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql deleted file mode 100644 index 
fff1fe154f57..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql +++ /dev/null @@ -1,90 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create COMMUNITY handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - -------------------------------------------------------------- --- This will create COLLECTION handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - 
where uuid not in ( - select c.uuid as uuid from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md deleted file mode 100644 index 6cef123859ca..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# Oracle Flyway Database Migrations (i.e. Upgrades) - ---- -WARNING: Oracle Support is deprecated. -See https://github.com/DSpace/DSpace/issues/8214 ---- - -The SQL scripts in this directory are Oracle-specific database migrations. They are -used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). -As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using -and then executes the appropriate upgrade script(s) to bring it up to the latest -version. - -If any failures occur, Flyway will "rollback" the upgrade script which resulted -in an error and log the issue in the DSpace log file at `[dspace]/log/dspace.log.[date]` - -**WARNING:** IT IS NOT RECOMMENDED TO RUN THESE SCRIPTS MANUALLY. If you do so, -Flyway will may throw failures the next time you startup DSpace, as Flyway will -not realize you manually ran one or more scripts. - -Please see the Flyway Documentation for more information: http://flywaydb.org/ - -## Oracle Porting Notes for the Curious - -Oracle is missing quite a number of cool features found in Postgres, so -workarounds had to be found, most of which are hidden behind tests in -DatabaseManager. If Oracle is your DBMS, the workarounds are activated: - -Oracle doesn't like ';' characters in JDBC SQL - they have all been removed -from the DSpace source, including code in the .sql file reader to strip ;'s. - -browse code - LIMIT and OFFSET is used to limit browse results, and an -Oracle-hack is used to limit the result set to a given size - -Oracle has no boolean data type, so a new schema file was created that -uses NUMBER(1) (AKA 'integers') and code is inserted everywhere to use 0 for -false and 1 for true if DSpace is using Oracle. - -Oracle doesn't have a TEXT data type either, so TEXT columns are defined -as VARCHAR2 in the Oracle-specific schema. - -Oracle doesn't allow dynamic naming for objects, so our cute trick to -derive the name of the sequence by appending _seq to the table name -in a function doesn't work in Oracle - workaround is to insert Oracle -code to generate the name of the sequence and then place that into -our SQL calls to generate a new ID. - -Oracle doesn't let you directly set the value of sequences, so -update-sequences.sql is forced to use a special script sequpdate.sql -to update the sequences. - -Bitstream had a column 'size' which is a reserved word in Oracle, -so this had to be changed to 'size_bytes' with corresponding code changes. - -VARCHAR2 has a limit of 4000 characters, so DSpace text data is limited to 4k. 
-Going to the CLOB data type can get around that, but seemed like too much effort -for now. Note that with UTF-8 encoding that 4k could translate to 1300 -characters worst-case (every character taking up 3 bytes is the worst case -scenario.) - -### UPDATE 5 April 2007 - -CLOBs are now used as follows: -MetadataValue:text_value -Community:introductory_text -Community:copyright_text -Collection:introductory_text -Collection:license -Collection:copyright_text - -DatabaseManager had to have some of the type checking changed, because Oracle's -JDBC driver is reporting INTEGERS as type DECIMAL. - -Oracle doesn't like it when you reference table names in lower case when -getting JDBC metadata for the tables, so they are converted in TableRow -to upper case. - -### UPDATE 27 November 2012 - -Oracle complains with ORA-01408 if you attempt to create an index on a column which -has already had the UNIQUE contraint added (such an index is implicit in maintaining the uniqueness -of the column). See [DS-1370](https://jira.duraspace.org/browse/DS-1370) for details. diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql deleted file mode 100644 index 157274e05d66..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql +++ /dev/null @@ -1,550 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE bitstreamformatregistry_seq; -CREATE SEQUENCE fileextension_seq; -CREATE SEQUENCE bitstream_seq; -CREATE SEQUENCE eperson_seq; --- start group sequence at 0, since Anonymous group = 0 -CREATE SEQUENCE epersongroup_seq MINVALUE 0 START WITH 0; -CREATE SEQUENCE item_seq; -CREATE SEQUENCE bundle_seq; -CREATE SEQUENCE item2bundle_seq; -CREATE SEQUENCE bundle2bitstream_seq; -CREATE SEQUENCE dctyperegistry_seq; -CREATE SEQUENCE dcvalue_seq; -CREATE SEQUENCE community_seq; -CREATE SEQUENCE collection_seq; -CREATE SEQUENCE community2community_seq; -CREATE SEQUENCE community2collection_seq; -CREATE SEQUENCE collection2item_seq; -CREATE SEQUENCE resourcepolicy_seq; -CREATE SEQUENCE epersongroup2eperson_seq; -CREATE SEQUENCE handle_seq; -CREATE SEQUENCE workspaceitem_seq; -CREATE SEQUENCE workflowitem_seq; -CREATE SEQUENCE tasklistitem_seq; -CREATE SEQUENCE registrationdata_seq; -CREATE SEQUENCE subscription_seq; -CREATE SEQUENCE history_seq; -CREATE SEQUENCE historystate_seq; -CREATE SEQUENCE communities2item_seq; -CREATE SEQUENCE itemsbyauthor_seq; -CREATE SEQUENCE itemsbytitle_seq; -CREATE SEQUENCE itemsbydate_seq; -CREATE SEQUENCE itemsbydateaccessioned_seq; - - -------------------------------------------------------- --- BitstreamFormatRegistry table -------------------------------------------------------- -CREATE TABLE BitstreamFormatRegistry -( - bitstream_format_id INTEGER PRIMARY KEY, - mimetype VARCHAR2(48), - short_description VARCHAR2(128) UNIQUE, - description VARCHAR2(2000), - support_level INTEGER, - -- Identifies internal types - internal NUMBER(1) -); - -------------------------------------------------------- --- FileExtension table -------------------------------------------------------- -CREATE TABLE FileExtension -( - file_extension_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - extension VARCHAR2(16) -); - -------------------------------------------------------- --- Bitstream table -------------------------------------------------------- -CREATE TABLE Bitstream -( - bitstream_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - name VARCHAR2(256), - size_bytes INTEGER, - checksum VARCHAR2(64), - checksum_algorithm VARCHAR2(32), - description VARCHAR2(2000), - user_format_description VARCHAR2(2000), - source VARCHAR2(256), - internal_id VARCHAR2(256), - deleted NUMBER(1), - store_number INTEGER, - sequence_id INTEGER -); - -------------------------------------------------------- --- EPerson table -------------------------------------------------------- -CREATE TABLE EPerson -( - eperson_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - password VARCHAR2(64), - firstname VARCHAR2(64), - lastname VARCHAR2(64), - can_log_in NUMBER(1), - require_certificate NUMBER(1), - self_registered NUMBER(1), - last_active TIMESTAMP, - sub_frequency INTEGER, - phone VARCHAR2(32) -); - -------------------------------------------------------- --- EPersonGroup table -------------------------------------------------------- -CREATE TABLE EPersonGroup -( - eperson_group_id INTEGER PRIMARY KEY, - name VARCHAR2(256) UNIQUE -); - -------------------------------------------------------- --- Item table -------------------------------------------------------- -CREATE TABLE Item -( - item_id INTEGER PRIMARY KEY, - submitter_id INTEGER REFERENCES EPerson(eperson_id), - 
in_archive NUMBER(1), - withdrawn NUMBER(1), - last_modified TIMESTAMP, - owning_collection INTEGER -); - -------------------------------------------------------- --- Bundle table -------------------------------------------------------- -CREATE TABLE Bundle -( - bundle_id INTEGER PRIMARY KEY, - mets_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - name VARCHAR2(16), -- ORIGINAL | THUMBNAIL | TEXT - primary_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - -------------------------------------------------------- --- Item2Bundle table -------------------------------------------------------- -CREATE TABLE Item2Bundle -( - id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - bundle_id INTEGER REFERENCES Bundle(bundle_id) -); - --- index by item_id -CREATE INDEX item2bundle_item_idx on Item2Bundle(item_id); - -------------------------------------------------------- --- Bundle2Bitstream table -------------------------------------------------------- -CREATE TABLE Bundle2Bitstream -( - id INTEGER PRIMARY KEY, - bundle_id INTEGER REFERENCES Bundle(bundle_id), - bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - --- index by bundle_id -CREATE INDEX bundle2bitstream_bundle_idx ON Bundle2Bitstream(bundle_id); - -------------------------------------------------------- --- DCTypeRegistry table -------------------------------------------------------- -CREATE TABLE DCTypeRegistry -( - dc_type_id INTEGER PRIMARY KEY, - element VARCHAR2(64), - qualifier VARCHAR2(64), - scope_note VARCHAR2(2000), - UNIQUE(element, qualifier) -); - -------------------------------------------------------- --- DCValue table -------------------------------------------------------- -CREATE TABLE DCValue -( - dc_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - dc_type_id INTEGER REFERENCES DCTypeRegistry(dc_type_id), - text_value VARCHAR2(2000), - text_lang VARCHAR2(24), - place INTEGER, - source_id INTEGER -); - --- An index for item_id - almost all access is based on --- instantiating the item object, which grabs all dcvalues --- related to that item -CREATE INDEX dcvalue_item_idx on DCValue(item_id); - -------------------------------------------------------- --- Community table -------------------------------------------------------- -CREATE TABLE Community -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128) UNIQUE, - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000) -); - -------------------------------------------------------- --- Collection table -------------------------------------------------------- -CREATE TABLE Collection -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license VARCHAR2(2000), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -------------------------------------------------------- --- Community2Community table -------------------------------------------------------- -CREATE TABLE Community2Community -( - id 
INTEGER PRIMARY KEY, - parent_comm_id INTEGER REFERENCES Community(community_id), - child_comm_id INTEGER REFERENCES Community(community_id) -); - -------------------------------------------------------- --- Community2Collection table -------------------------------------------------------- -CREATE TABLE Community2Collection -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - -------------------------------------------------------- --- Collection2Item table -------------------------------------------------------- -CREATE TABLE Collection2Item -( - id INTEGER PRIMARY KEY, - collection_id INTEGER REFERENCES Collection(collection_id), - item_id INTEGER REFERENCES Item(item_id) -); - --- index by collection_id -CREATE INDEX collection2item_collection_idx ON Collection2Item(collection_id); - -------------------------------------------------------- --- ResourcePolicy table -------------------------------------------------------- -CREATE TABLE ResourcePolicy -( - policy_id INTEGER PRIMARY KEY, - resource_type_id INTEGER, - resource_id INTEGER, - action_id INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - epersongroup_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - start_date DATE, - end_date DATE -); - --- index by resource_type,resource_id - all queries by --- authorization manager are select type=x, id=y, action=z -CREATE INDEX resourcepolicy_type_id_idx ON ResourcePolicy(resource_type_id,resource_id); - -------------------------------------------------------- --- EPersonGroup2EPerson table -------------------------------------------------------- -CREATE TABLE EPersonGroup2EPerson -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - eperson_id INTEGER REFERENCES EPerson(eperson_id) -); - --- Index by group ID (used heavily by AuthorizeManager) -CREATE INDEX epersongroup2eperson_group_idx on EPersonGroup2EPerson(eperson_group_id); - - -------------------------------------------------------- --- Handle table -------------------------------------------------------- -CREATE TABLE Handle -( - handle_id INTEGER PRIMARY KEY, - handle VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER -); - -------------------------------------------------------- --- WorkspaceItem table -------------------------------------------------------- -CREATE TABLE WorkspaceItem -( - workspace_item_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), -- boolean - published_before NUMBER(1), - multiple_files NUMBER(1), - -- How for the user has got in the submit process - stage_reached INTEGER -); - -------------------------------------------------------- --- WorkflowItem table -------------------------------------------------------- -CREATE TABLE WorkflowItem -( - workflow_id INTEGER PRIMARY KEY, - item_id INTEGER UNIQUE REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - state INTEGER, - owner INTEGER REFERENCES EPerson(eperson_id), - - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI - -); - 
-------------------------------------------------------- --- TasklistItem table -------------------------------------------------------- -CREATE TABLE TasklistItem -( - tasklist_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - workflow_id INTEGER REFERENCES WorkflowItem(workflow_id) -); - - -------------------------------------------------------- --- RegistrationData table -------------------------------------------------------- -CREATE TABLE RegistrationData -( - registrationdata_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - token VARCHAR2(48), - expires TIMESTAMP -); - - -------------------------------------------------------- --- Subscription table -------------------------------------------------------- -CREATE TABLE Subscription -( - subscription_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - - -------------------------------------------------------- --- History table -------------------------------------------------------- -CREATE TABLE History -( - history_id INTEGER PRIMARY KEY, - -- When it was stored - creation_date TIMESTAMP, - -- A checksum to keep INTEGERizations from being stored more than once - checksum VARCHAR2(32) UNIQUE -); - -------------------------------------------------------- --- HistoryState table -------------------------------------------------------- -CREATE TABLE HistoryState -( - history_state_id INTEGER PRIMARY KEY, - object_id VARCHAR2(64) -); - ------------------------------------------------------------- --- Browse subsystem tables and views ------------------------------------------------------------- - -------------------------------------------------------- --- Communities2Item table -------------------------------------------------------- -CREATE TABLE Communities2Item -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - item_id INTEGER REFERENCES Item(item_id) -); - -------------------------------------------------------- --- Community2Item view ------------------------------------------------------- -CREATE VIEW Community2Item as -SELECT Community2Collection.community_id, Collection2Item.item_id -FROM Community2Collection, Collection2Item -WHERE Collection2Item.collection_id = Community2Collection.collection_id -; - -------------------------------------------------------- --- ItemsByAuthor table -------------------------------------------------------- -CREATE TABLE ItemsByAuthor -( - items_by_author_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - author VARCHAR2(2000), - sort_author VARCHAR2(2000) -); - --- index by sort_author, of course! 
-CREATE INDEX sort_author_idx on ItemsByAuthor(sort_author); - -------------------------------------------------------- --- CollectionItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CollectionItemsByAuthor as -SELECT Collection2Item.collection_id, ItemsByAuthor.* -FROM ItemsByAuthor, Collection2Item -WHERE ItemsByAuthor.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CommunityItemsByAuthor as -SELECT Communities2Item.community_id, ItemsByAuthor.* -FROM ItemsByAuthor, Communities2Item -WHERE ItemsByAuthor.item_id = Communities2Item.item_id -; - ----------------------------------------- --- ItemsByTitle table ----------------------------------------- -CREATE TABLE ItemsByTitle -( - items_by_title_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - title VARCHAR2(2000), - sort_title VARCHAR2(2000) -); - --- index by the sort_title -CREATE INDEX sort_title_idx on ItemsByTitle(sort_title); - - -------------------------------------------------------- --- CollectionItemsByTitle view -------------------------------------------------------- -CREATE VIEW CollectionItemsByTitle as -SELECT Collection2Item.collection_id, ItemsByTitle.* -FROM ItemsByTitle, Collection2Item -WHERE ItemsByTitle.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByTitle view -------------------------------------------------------- -CREATE VIEW CommunityItemsByTitle as -SELECT Communities2Item.community_id, ItemsByTitle.* -FROM ItemsByTitle, Communities2Item -WHERE ItemsByTitle.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDate table -------------------------------------------------------- -CREATE TABLE ItemsByDate -( - items_by_date_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_issued VARCHAR2(2000) -); - --- sort by date -CREATE INDEX date_issued_idx on ItemsByDate(date_issued); - -------------------------------------------------------- --- CollectionItemsByDate view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDate as -SELECT Collection2Item.collection_id, ItemsByDate.* -FROM ItemsByDate, Collection2Item -WHERE ItemsByDate.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByDate view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDate as -SELECT Communities2Item.community_id, ItemsByDate.* -FROM ItemsByDate, Communities2Item -WHERE ItemsByDate.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDateAccessioned table -------------------------------------------------------- -CREATE TABLE ItemsByDateAccessioned -( - items_by_date_accessioned_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_accessioned VARCHAR2(2000) -); - -------------------------------------------------------- --- CollectionItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDateAccession as -SELECT Collection2Item.collection_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Collection2Item -WHERE ItemsByDateAccessioned.item_id = Collection2Item.item_id -; - 
-------------------------------------------------------- --- CommunityItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDateAccession as -SELECT Communities2Item.community_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Communities2Item -WHERE ItemsByDateAccessioned.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql deleted file mode 100644 index 37d7e115eb53..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql +++ /dev/null @@ -1,57 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE epersongroup2workspaceitem_seq; - -------------------------------------------------------------------------------- --- create the new EPersonGroup2WorkspaceItem table -------------------------------------------------------------------------------- - -CREATE TABLE EPersonGroup2WorkspaceItem -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - workspace_item_id INTEGER REFERENCES WorkspaceItem(workspace_item_id) -); - -------------------------------------------------------------------------------- --- modification to collection table to support being able to change the --- submitter and collection admin group names -------------------------------------------------------------------------------- -ALTER TABLE collection ADD submitter INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE collection ADD admin INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE eperson ADD netid VARCHAR2(64) UNIQUE; - -------------------------------------------------------------------------------- --- Additional indices for performance -------------------------------------------------------------------------------- - --- index by resource id and resource type id -CREATE INDEX handle_resource_id_type_idx ON handle(resource_id, resource_type_id); - --- Indexing browse tables update/re-index performance -CREATE INDEX Communities2Item_item_id_idx ON Communities2Item( item_id ); -CREATE INDEX ItemsByAuthor_item_id_idx ON ItemsByAuthor(item_id); -CREATE INDEX ItemsByTitle_item_id_idx ON ItemsByTitle(item_id); -CREATE INDEX ItemsByDate_item_id_idx ON ItemsByDate(item_id); -CREATE INDEX ItemsByDateAcc_item_id_idx ON ItemsByDateAccessioned(item_id); - --- Improve mapping tables -CREATE INDEX Com2Coll_community_id_idx ON Community2Collection(community_id); -CREATE INDEX Com2Coll_collection_id_idx ON Community2Collection(collection_id); -CREATE INDEX Coll2Item_item_id_idx ON Collection2Item( item_id ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql deleted file mode 100644 index a713ced8bbb2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql +++ /dev/null @@ -1,133 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ---------------------------------------- --- Update MetadataValue to include CLOB ---------------------------------------- - -CREATE TABLE MetadataValueTemp -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value CLOB, - text_lang VARCHAR(64), - place INTEGER -); - -INSERT INTO MetadataValueTemp -SELECT * FROM MetadataValue; - -DROP VIEW dcvalue; -DROP TABLE MetadataValue; -ALTER TABLE MetadataValueTemp RENAME TO MetadataValue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); - ------------------------------------- --- Update Community to include CLOBs ------------------------------------- - -CREATE TABLE CommunityTemp -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text CLOB, - side_bar_text VARCHAR2(2000) -); - -INSERT INTO CommunityTemp -SELECT * FROM Community; - -DROP TABLE Community CASCADE CONSTRAINTS; -ALTER TABLE CommunityTemp RENAME TO Community; - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_parent -FOREIGN KEY (parent_comm_id) -REFERENCES Community (community_id); - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_child -FOREIGN KEY (child_comm_id) -REFERENCES Community (community_id); - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -ALTER TABLE Communities2Item ADD CONSTRAINT fk_c2i_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -------------------------------------- --- Update Collection to include CLOBs -------------------------------------- - -CREATE TABLE CollectionTemp -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license 
CLOB, - copyright_text CLOB, - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - submitter INTEGER REFERENCES EPersonGroup( eperson_group_id ), - admin INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -INSERT INTO CollectionTemp -SELECT * FROM Collection; - -DROP TABLE Collection CASCADE CONSTRAINTS; -ALTER TABLE CollectionTemp RENAME TO Collection; - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Collection2Item ADD CONSTRAINT fk_c2i_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkspaceItem ADD CONSTRAINT fk_wsi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkflowItem ADD CONSTRAINT fk_wfi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Subscription ADD CONSTRAINT fk_subs_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql deleted file mode 100644 index 54cf10067b91..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql +++ /dev/null @@ -1,371 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------- --- Sequences for Group within Group feature -------------------------------------------------------------------------------- -CREATE SEQUENCE group2group_seq; -CREATE SEQUENCE group2groupcache_seq; - ------------------------------------------------------- --- Group2Group table, records group membership in other groups ------------------------------------------------------- -CREATE TABLE Group2Group -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - ------------------------------------------------------- --- Group2GroupCache table, is the 'unwound' hierarchy in --- Group2Group. It explicitly names every parent child --- relationship, even with nested groups. 
For example, --- If Group2Group lists B is a child of A and C is a child of B, --- this table will have entries for parent(A,B), and parent(B,C) --- AND parent(A,C) so that all of the child groups of A can be --- looked up in a single simple query ------------------------------------------------------- -CREATE TABLE Group2GroupCache -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - - -------------------------------------------------------- --- New Metadata Tables and Sequences -------------------------------------------------------- -CREATE SEQUENCE metadataschemaregistry_seq; -CREATE SEQUENCE metadatafieldregistry_seq; -CREATE SEQUENCE metadatavalue_seq; - --- MetadataSchemaRegistry table -CREATE TABLE MetadataSchemaRegistry -( - metadata_schema_id INTEGER PRIMARY KEY, - namespace VARCHAR(256) UNIQUE, - short_id VARCHAR(32) -); - --- MetadataFieldRegistry table -CREATE TABLE MetadataFieldRegistry -( - metadata_field_id INTEGER PRIMARY KEY, - metadata_schema_id INTEGER NOT NULL REFERENCES MetadataSchemaRegistry(metadata_schema_id), - element VARCHAR(64), - qualifier VARCHAR(64), - scope_note VARCHAR2(2000) -); - --- MetadataValue table -CREATE TABLE MetadataValue -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value VARCHAR2(2000), - text_lang VARCHAR(24), - place INTEGER -); - --- Create the DC schema -INSERT INTO MetadataSchemaRegistry VALUES (1,'http://dublincore.org/documents/dcmi-terms/','dc'); - --- Migrate the existing DCTypes into the new metadata field registry -INSERT INTO MetadataFieldRegistry - (metadata_schema_id, metadata_field_id, element, qualifier, scope_note) - SELECT '1' AS metadata_schema_id, dc_type_id, element, - qualifier, scope_note FROM dctyperegistry; - --- Copy the DCValues into the new MetadataValue table -INSERT INTO MetadataValue (item_id, metadata_field_id, text_value, text_lang, place) - SELECT item_id, dc_type_id, text_value, text_lang, place FROM dcvalue; - -DROP TABLE dcvalue; -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - - --- After copying data from dctypregistry to metadataschemaregistry, we need to reset our sequences --- Update metadatafieldregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_field_id) INTO curr FROM metadatafieldregistry; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatafieldregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatafieldregistry_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadatavalue_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_value_id) INTO curr FROM metadatavalue; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatavalue_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatavalue_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadataschemaregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_schema_id) INTO curr FROM metadataschemaregistry; - - curr := curr + 1; - - 
EXECUTE IMMEDIATE 'DROP SEQUENCE metadataschemaregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadataschemaregistry_seq START WITH ' || NVL(curr,1); -END; -/ - --- Drop the old dctyperegistry -DROP TABLE dctyperegistry; - --- create indexes for the metadata tables -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); -CREATE INDEX metadatafield_schema_idx ON MetadataFieldRegistry(metadata_schema_id); - - -------------------------------------------------------- --- Create the checksum checker tables -------------------------------------------------------- --- list of the possible results as determined --- by the system or an administrator - -CREATE TABLE checksum_results -( - result_code VARCHAR(64) PRIMARY KEY, - result_description VARCHAR2(2000) -); - - --- This table has a one-to-one relationship --- with the bitstream table. A row will be inserted --- every time a row is inserted into the bitstream table, and --- that row will be updated every time the checksum is --- re-calculated. - -CREATE TABLE most_recent_checksum -( - bitstream_id INTEGER PRIMARY KEY, - to_be_processed NUMBER(1) NOT NULL, - expected_checksum VARCHAR(64) NOT NULL, - current_checksum VARCHAR(64) NOT NULL, - last_process_start_date TIMESTAMP NOT NULL, - last_process_end_date TIMESTAMP NOT NULL, - checksum_algorithm VARCHAR(64) NOT NULL, - matched_prev_checksum NUMBER(1) NOT NULL, - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - - --- A row will be inserted into this table every --- time a checksum is re-calculated. - -CREATE SEQUENCE checksum_history_seq; - -CREATE TABLE checksum_history -( - check_id INTEGER PRIMARY KEY, - bitstream_id INTEGER, - process_start_date TIMESTAMP, - process_end_date TIMESTAMP, - checksum_expected VARCHAR(64), - checksum_calculated VARCHAR(64), - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - --- this will insert into the result code --- the initial results - -insert into checksum_results -values -( - 'INVALID_HISTORY', - 'Install of the cheksum checking code do not consider this history as valid' -); - -insert into checksum_results -values -( - 'BITSTREAM_NOT_FOUND', - 'The bitstream could not be found' -); - -insert into checksum_results -values -( - 'CHECKSUM_MATCH', - 'Current checksum matched previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_NO_MATCH', - 'Current checksum does not match previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_PREV_NOT_FOUND', - 'Previous checksum was not found: no comparison possible' -); - -insert into checksum_results -values -( - 'BITSTREAM_INFO_NOT_FOUND', - 'Bitstream info not found' -); - -insert into checksum_results -values -( - 'CHECKSUM_ALGORITHM_INVALID', - 'Invalid checksum algorithm' -); -insert into checksum_results -values -( - 'BITSTREAM_NOT_PROCESSED', - 'Bitstream marked to_be_processed=false' -); -insert into checksum_results -values -( - 'BITSTREAM_MARKED_DELETED', - 'Bitstream marked deleted in bitstream table' -); - --- this will insert into the most recent checksum --- on install all existing bitstreams --- setting all bitstreams already set as --- deleted to not be processed - -insert into most_recent_checksum -( - bitstream_id, - to_be_processed, - expected_checksum, - current_checksum, - last_process_start_date, - last_process_end_date, - checksum_algorithm, - matched_prev_checksum -) -select - bitstream.bitstream_id, - '1', - CASE WHEN 
bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END, - '1' -from bitstream; - --- Update all the deleted checksums --- to not be checked --- because they have since been --- deleted from the system - -update most_recent_checksum -set to_be_processed = 0 -where most_recent_checksum.bitstream_id in ( -select bitstream_id -from bitstream where deleted = '1' ); - --- this will insert into history table --- for the initial start --- we want to tell the users to disregard the initial --- inserts into the checksum history table - -insert into checksum_history -( - bitstream_id, - process_start_date, - process_end_date, - checksum_expected, - checksum_calculated -) -select most_recent_checksum.bitstream_id, - most_recent_checksum.last_process_end_date, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - most_recent_checksum.expected_checksum, - most_recent_checksum.expected_checksum -FROM most_recent_checksum; - --- update the history to indicate that this was --- the first time the software was installed -update checksum_history -set result = 'INVALID_HISTORY'; - - -------------------------------------------------------- --- Table and views for 'browse by subject' functionality -------------------------------------------------------- -CREATE SEQUENCE itemsbysubject_seq; - -------------------------------------------------------- --- ItemsBySubject table -------------------------------------------------------- -CREATE TABLE ItemsBySubject -( - items_by_subject_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - subject VARCHAR2(2000), - sort_subject VARCHAR2(2000) -); - --- index by sort_subject -CREATE INDEX sort_subject_idx on ItemsBySubject(sort_subject); - -------------------------------------------------------- --- CollectionItemsBySubject view -------------------------------------------------------- -CREATE VIEW CollectionItemsBySubject as -SELECT Collection2Item.collection_id, ItemsBySubject.* -FROM ItemsBySubject, Collection2Item -WHERE ItemsBySubject.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsBySubject view -------------------------------------------------------- -CREATE VIEW CommunityItemsBySubject as -SELECT Communities2Item.community_id, ItemsBySubject.* -FROM ItemsBySubject, Communities2Item -WHERE ItemsBySubject.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql deleted file mode 100644 index bb217bd0d18d..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql +++ /dev/null @@ -1,142 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- 
WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - --- Remove NOT NULL restrictions from the checksum columns of most_recent_checksum -ALTER TABLE most_recent_checksum MODIFY expected_checksum null; -ALTER TABLE most_recent_checksum MODIFY current_checksum null; - ------------------------------------------------------- --- New Column language language in EPerson ------------------------------------------------------- - -alter table eperson ADD language VARCHAR2(64); -update eperson set language = 'en'; - --- totally unused column -alter table bundle drop column mets_bitstream_id; - -------------------------------------------------------------------------------- --- Necessary for Configurable Submission functionality: --- Modification to workspaceitem table to support keeping track --- of the last page reached within a step in the Configurable Submission Process -------------------------------------------------------------------------------- -ALTER TABLE workspaceitem ADD page_reached INTEGER; - - -------------------------------------------------------------------------- --- Increase the mimetype field size to support larger types, such as the --- new Word 2007 mimetypes. -------------------------------------------------------------------------- -ALTER TABLE BitstreamFormatRegistry MODIFY (mimetype VARCHAR(256)); - - -------------------------------------------------------------------------- --- Tables to manage cache of item counts for communities and collections -------------------------------------------------------------------------- - -CREATE TABLE collection_item_count ( - collection_id INTEGER PRIMARY KEY REFERENCES collection(collection_id), - count INTEGER -); - -CREATE TABLE community_item_count ( - community_id INTEGER PRIMARY KEY REFERENCES community(community_id), - count INTEGER -); - ------------------------------------------------------------------- --- Remove sequences and tables of the old browse system ------------------------------------------------------------------- - -DROP SEQUENCE itemsbyauthor_seq; -DROP SEQUENCE itemsbytitle_seq; -DROP SEQUENCE itemsbydate_seq; -DROP SEQUENCE itemsbydateaccessioned_seq; -DROP SEQUENCE itemsbysubject_seq; - -DROP TABLE ItemsByAuthor CASCADE CONSTRAINTS; -DROP TABLE ItemsByTitle CASCADE CONSTRAINTS; -DROP TABLE ItemsByDate CASCADE CONSTRAINTS; -DROP TABLE ItemsByDateAccessioned CASCADE CONSTRAINTS; -DROP TABLE ItemsBySubject CASCADE CONSTRAINTS; - -DROP TABLE History CASCADE CONSTRAINTS; -DROP TABLE HistoryState CASCADE CONSTRAINTS; - ----------------------------------------------------------------- --- Add indexes for foreign key columns ----------------------------------------------------------------- - -CREATE INDEX fe_bitstream_fk_idx ON FileExtension(bitstream_format_id); - -CREATE INDEX bit_bitstream_fk_idx ON Bitstream(bitstream_format_id); - -CREATE INDEX g2g_parent_fk_idx ON Group2Group(parent_id); -CREATE INDEX g2g_child_fk_idx ON Group2Group(child_id); - --- CREATE INDEX g2gc_parent_fk_idx ON Group2Group(parent_id); --- CREATE INDEX g2gc_child_fk_idx ON Group2Group(child_id); - -CREATE INDEX item_submitter_fk_idx ON Item(submitter_id); - -CREATE INDEX bundle_primary_fk_idx ON Bundle(primary_bitstream_id); - -CREATE INDEX item2bundle_bundle_fk_idx ON Item2Bundle(bundle_id); - 
-CREATE INDEX bundle2bits_bitstream_fk_idx ON Bundle2Bitstream(bitstream_id); - -CREATE INDEX metadatavalue_field_fk_idx ON MetadataValue(metadata_field_id); - -CREATE INDEX community_logo_fk_idx ON Community(logo_bitstream_id); - -CREATE INDEX collection_logo_fk_idx ON Collection(logo_bitstream_id); -CREATE INDEX collection_template_fk_idx ON Collection(template_item_id); -CREATE INDEX collection_workflow1_fk_idx ON Collection(workflow_step_1); -CREATE INDEX collection_workflow2_fk_idx ON Collection(workflow_step_2); -CREATE INDEX collection_workflow3_fk_idx ON Collection(workflow_step_3); -CREATE INDEX collection_submitter_fk_idx ON Collection(submitter); -CREATE INDEX collection_admin_fk_idx ON Collection(admin); - -CREATE INDEX com2com_parent_fk_idx ON Community2Community(parent_comm_id); -CREATE INDEX com2com_child_fk_idx ON Community2Community(child_comm_id); - -CREATE INDEX rp_eperson_fk_idx ON ResourcePolicy(eperson_id); -CREATE INDEX rp_epersongroup_fk_idx ON ResourcePolicy(epersongroup_id); - -CREATE INDEX epg2ep_eperson_fk_idx ON EPersonGroup2EPerson(eperson_id); - -CREATE INDEX workspace_item_fk_idx ON WorkspaceItem(item_id); -CREATE INDEX workspace_coll_fk_idx ON WorkspaceItem(collection_id); - --- CREATE INDEX workflow_item_fk_idx ON WorkflowItem(item_id); -CREATE INDEX workflow_coll_fk_idx ON WorkflowItem(collection_id); -CREATE INDEX workflow_owner_fk_idx ON WorkflowItem(owner); - -CREATE INDEX tasklist_eperson_fk_idx ON TasklistItem(eperson_id); -CREATE INDEX tasklist_workflow_fk_idx ON TasklistItem(workflow_id); - -CREATE INDEX subs_eperson_fk_idx ON Subscription(eperson_id); -CREATE INDEX subs_collection_fk_idx ON Subscription(collection_id); - -CREATE INDEX epg2wi_group_fk_idx ON epersongroup2workspaceitem(eperson_group_id); -CREATE INDEX epg2wi_workspace_fk_idx ON epersongroup2workspaceitem(workspace_item_id); - -CREATE INDEX Comm2Item_community_fk_idx ON Communities2Item( community_id ); - -CREATE INDEX mrc_result_fk_idx ON most_recent_checksum( result ); - -CREATE INDEX ch_result_fk_idx ON checksum_history( result ); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql deleted file mode 100644 index 659ca32983cc..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql +++ /dev/null @@ -1,93 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------- --- New Column for Community Admin - Delegated Admin patch (DS-228) ------------------------------------------------------------------- -ALTER TABLE community ADD admin INTEGER REFERENCES epersongroup ( eperson_group_id ); -CREATE INDEX community_admin_fk_idx ON Community(admin); - -------------------------------------------------------------------------- --- DS-236 schema changes for Authority Control of Metadata Values -------------------------------------------------------------------------- -ALTER TABLE MetadataValue - ADD ( authority VARCHAR(100), - confidence INTEGER DEFAULT -1); - --------------------------------------------------------------------------- --- DS-295 CC License being assigned incorrect Mime Type during submission. --------------------------------------------------------------------------- -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'CC License') - WHERE name = 'license_text' AND source = 'org.dspace.license.CreativeCommons'; - -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'RDF XML') - WHERE name = 'license_rdf' AND source = 'org.dspace.license.CreativeCommons'; - -------------------------------------------------------------------------- --- DS-260 Cleanup of Owning collection column for template item created --- with the JSPUI after the collection creation -------------------------------------------------------------------------- -UPDATE item SET owning_collection = null WHERE item_id IN - (SELECT template_item_id FROM collection WHERE template_item_id IS NOT null); - --- Recreate restraints with a know name and deferrable option! 
--- (The previous version of these constraints is dropped by org.dspace.storage.rdbms.migration.V1_5_9__Drop_constraint_for_DSpace_1_6_schema) -ALTER TABLE community2collection ADD CONSTRAINT comm2coll_collection_fk FOREIGN KEY (collection_id) REFERENCES collection DEFERRABLE; -ALTER TABLE community2community ADD CONSTRAINT com2com_child_fk FOREIGN KEY (child_comm_id) REFERENCES community DEFERRABLE; -ALTER TABLE collection2item ADD CONSTRAINT coll2item_item_fk FOREIGN KEY (item_id) REFERENCES item DEFERRABLE; - - ------------------------------------------------------------------- --- New tables /sequences for the harvester functionality (DS-289) ------------------------------------------------------------------- -CREATE SEQUENCE harvested_collection_seq; -CREATE SEQUENCE harvested_item_seq; - -------------------------------------------------------- --- Create the harvest settings table -------------------------------------------------------- --- Values used by the OAIHarvester to harvest a collection --- HarvestInstance is the DAO class for this table - -CREATE TABLE harvested_collection -( - collection_id INTEGER REFERENCES collection(collection_id) ON DELETE CASCADE, - harvest_type INTEGER, - oai_source VARCHAR(256), - oai_set_id VARCHAR(256), - harvest_message VARCHAR2(512), - metadata_config_id VARCHAR(256), - harvest_status INTEGER, - harvest_start_time TIMESTAMP, - last_harvested TIMESTAMP, - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_collection_fk_idx ON harvested_collection(collection_id); - - -CREATE TABLE harvested_item -( - item_id INTEGER REFERENCES item(item_id) ON DELETE CASCADE, - last_harvested TIMESTAMP, - oai_id VARCHAR(64), - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_item_fk_idx ON harvested_item(item_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql deleted file mode 100644 index f4b2737fb3a8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------- --- Remove unused / obsolete sequence 'dctyperegistry_seq' (DS-729) ------------------------------------------------------------------- -DROP SEQUENCE dctyperegistry_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql deleted file mode 100644 index f96cddbe7fd4..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- New column for bitstream order DS-749 -- -------------------------------------------- -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; - ---Place the sequence id's in the order -UPDATE bundle2bitstream SET bitstream_order=(SELECT sequence_id FROM bitstream WHERE bitstream.bitstream_id=bundle2bitstream.bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql deleted file mode 100644 index 472dc7dc5279..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql +++ /dev/null @@ -1,52 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -ALTER TABLE resourcepolicy - ADD ( - rpname VARCHAR2(30), - rptype VARCHAR2(30), - rpdescription VARCHAR2(100) - ); - - -ALTER TABLE item ADD discoverable NUMBER(1); - -CREATE TABLE versionhistory -( - versionhistory_id INTEGER NOT NULL PRIMARY KEY -); - -CREATE TABLE versionitem -( - versionitem_id INTEGER NOT NULL PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - version_number INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - version_date TIMESTAMP, - version_summary VARCHAR2(255), - versionhistory_id INTEGER REFERENCES VersionHistory(versionhistory_id) -); - -CREATE SEQUENCE versionitem_seq; -CREATE SEQUENCE versionhistory_seq; - - -------------------------------------------- --- New columns and longer hash for salted password hashing DS-861 -- -------------------------------------------- -ALTER TABLE EPerson modify( password VARCHAR(128)); -ALTER TABLE EPerson ADD salt VARCHAR(32); -ALTER TABLE EPerson ADD digest_algorithm VARCHAR(16); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql deleted file mode 100644 index 8102376906a3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql +++ /dev/null @@ -1,88 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- Ensure that discoverable has a sensible default -------------------------------------------- -update item set discoverable=1 WHERE discoverable IS NULL; - -------------------------------------------- --- Add support for DOIs (table and seq.) 
-- -------------------------------------------- - -CREATE TABLE Doi -( - doi_id INTEGER PRIMARY KEY, - doi VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER, - status INTEGER -); - -CREATE SEQUENCE doi_seq; - --- index by resource id and resource type id -CREATE INDEX doi_resource_id_type_idx ON doi(resource_id, resource_type_id); - -------------------------------------------- --- Table of running web applications for 'dspace version' -- -------------------------------------------- - -CREATE TABLE Webapp -( - webapp_id INTEGER NOT NULL PRIMARY KEY, - AppName VARCHAR2(32), - URL VARCHAR2(1000), - Started TIMESTAMP, - isUI NUMBER(1) -); - -CREATE SEQUENCE webapp_seq; - -------------------------------------------------------- --- DS-824 RequestItem table -------------------------------------------------------- - -CREATE TABLE requestitem -( - requestitem_id INTEGER NOT NULL, - token varchar(48), - item_id INTEGER, - bitstream_id INTEGER, - allfiles NUMBER(1), - request_email VARCHAR2(64), - request_name VARCHAR2(64), - request_date TIMESTAMP, - accept_request NUMBER(1), - decision_date TIMESTAMP, - expires TIMESTAMP, - CONSTRAINT requestitem_pkey PRIMARY KEY (requestitem_id), - CONSTRAINT requestitem_token_key UNIQUE (token) -); - -CREATE SEQUENCE requestitem_seq; - -------------------------------------------------------- --- DS-1655 Disable "Initial Questions" page in Submission UI by default -------------------------------------------------------- -update workspaceitem set multiple_titles=1, published_before=1, multiple_files=1; -update workflowitem set multiple_titles=1, published_before=1, multiple_files=1; - -------------------------------------------------------- --- DS-1811 Removing a collection fails if non-Solr DAO has been used before for item count -------------------------------------------------------- -delete from collection_item_count; -delete from community_item_count; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql deleted file mode 100644 index 6d75905ec980..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql +++ /dev/null @@ -1,64 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - --- Special case of migration, we need to the EPerson schema in order to get our metadata for all queries to work --- but we cannot a DB connection until our database is up to date, so we need to create our registries manually in sql - -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/eperson' as namespace, 'eperson' as short_id FROM dual - WHERE NOT EXISTS (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry WHERE namespace = 'http://dspace.org/eperson' AND short_id = 'eperson'); - - --- Insert eperson.firstname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'firstname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'firstname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.lastname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'lastname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'lastname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.phone -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'phone' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'phone' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.language -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'language' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'language' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert into dc.provenance -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'provenance' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'provenance' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); - --- Insert into dc.rights.license -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'rights', 'license' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM 
metadatafieldregistry WHERE element = 'rights' AND qualifier='license' AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql deleted file mode 100644 index c86cfe31223e..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1945 RequestItem Helpdesk, store request message ------------------------------------------------------- -ALTER TABLE requestitem ADD request_message VARCHAR2(2000); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql deleted file mode 100644 index 8f0cd0d5e1d7..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql +++ /dev/null @@ -1,333 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1582 Metadata on all DSpace Objects --- NOTE: This script also has a complimentary Flyway Java Migration --- which drops the "item_id" constraint on metadatavalue --- org.dspace.storage.rdbms.migration.V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint ------------------------------------------------------- -alter table metadatavalue rename column item_id to resource_id; - -alter table metadatavalue MODIFY(resource_id not null); -alter table metadatavalue add resource_type_id integer; -UPDATE metadatavalue SET resource_type_id = 2; -alter table metadatavalue MODIFY(resource_type_id not null); - - - --- --------- --- community --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM community where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where 
short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM community where not name is null; - -alter table community drop (introductory_text, short_description, side_bar_text, copyright_text, name); - - --- ---------- --- collection --- ---------- - - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM collection where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where 
metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id, -provenance_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not provenance_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier = 'license') AS metadata_field_id, -license AS text_value, -null AS text_lang, -0 AS place -FROM collection where not license is null; - -alter table collection drop (introductory_text, short_description, copyright_text, side_bar_text, name, license, provenance_description); - - --- --------- --- bundle --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bundle_id AS resource_id, -1 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bundle where not name is null; - -alter table bundle drop column name; - - - --- --------- --- bitstream --- --------- - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id, -user_format_description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not user_format_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT 
-metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id, -source AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not source is null; - -alter table bitstream drop (name, description, user_format_description, source); - - --- --------- --- epersongroup --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_group_id AS resource_id, -6 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM epersongroup where not name is null; - -alter table epersongroup drop column name; - - - --- --------- --- eperson --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id, -firstname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not firstname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id, -lastname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not lastname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id, -phone AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not phone is null; - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id, -language AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not language is null; - -alter table eperson drop (firstname, lastname, phone, language); - --- 
--------- --- dcvalue view --- --------- - -drop view dcvalue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.resource_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1 AND MetadataValue.resource_type_id = 2; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql deleted file mode 100644 index 2e09b807de3b..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 0 and resource_id in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql deleted file mode 100644 index 9f9836faf471..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3563 Missing database index on metadatavalue.resource_type_id ------------------------------------------------------- --- Create an index on the metadata value resource_type_id column so that it can be searched efficiently. 
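-- (Editorial comment, not present in the original file: the PL/SQL block that
-- follows first drops metadatavalue_type_id_idx and swallows ORA-01418
-- ("specified index does not exist") via PRAGMA EXCEPTION_INIT, so the DROP is
-- a no-op when the index is absent; the index is then recreated immediately
-- after, keeping the migration safe to run whether or not the index exists.)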
-declare - index_not_exists EXCEPTION; - PRAGMA EXCEPTION_INIT(index_not_exists, -1418); -begin - - execute immediate 'DROP INDEX metadatavalue_type_id_idx'; - exception - when index_not_exists then null; -end; -/ -CREATE INDEX metadatavalue_type_id_idx ON metadatavalue (resource_type_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql deleted file mode 100644 index dd857e763df0..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql +++ /dev/null @@ -1,469 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -DROP VIEW community2item; - -CREATE TABLE dspaceobject -( - uuid RAW(16) NOT NULL PRIMARY KEY -); - -CREATE TABLE site -( - uuid RAW(16) NOT NULL PRIMARY KEY REFERENCES dspaceobject(uuid) -); - -ALTER TABLE eperson ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM eperson; -ALTER TABLE eperson ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE eperson MODIFY uuid NOT NULL; -ALTER TABLE eperson ADD CONSTRAINT eperson_id_unique PRIMARY KEY (uuid); -UPDATE eperson SET require_certificate = '0' WHERE require_certificate IS NULL; -UPDATE eperson SET self_registered = '0' WHERE self_registered IS NULL; - - - -UPDATE metadatavalue SET text_value='Administrator' - WHERE resource_type_id=6 AND resource_id=1; -UPDATE metadatavalue SET text_value='Anonymous' - WHERE resource_type_id=6 AND resource_id=0; - -ALTER TABLE epersongroup ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM epersongroup; -ALTER TABLE epersongroup ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE epersongroup MODIFY uuid NOT NULL; -ALTER TABLE epersongroup ADD CONSTRAINT epersongroup_id_unique PRIMARY KEY (uuid); - -ALTER TABLE item ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM item; -ALTER TABLE item ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE item MODIFY uuid NOT NULL; -ALTER TABLE item ADD CONSTRAINT item_id_unique PRIMARY KEY (uuid); - -ALTER TABLE community ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM community; -ALTER TABLE community ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE community MODIFY uuid NOT NULL; -ALTER TABLE community ADD CONSTRAINT community_id_unique PRIMARY KEY (uuid); - - -ALTER TABLE collection ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM collection; -ALTER TABLE collection ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE collection MODIFY uuid NOT NULL; -ALTER TABLE collection ADD CONSTRAINT collection_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bundle ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bundle; -ALTER TABLE bundle ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bundle MODIFY uuid NOT NULL; -ALTER TABLE bundle 
ADD CONSTRAINT bundle_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bitstream ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bitstream; -ALTER TABLE bitstream ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bitstream MODIFY uuid NOT NULL; -ALTER TABLE bitstream ADD CONSTRAINT bitstream_id_unique PRIMARY KEY (uuid); -UPDATE bitstream SET sequence_id = -1 WHERE sequence_id IS NULL; -UPDATE bitstream SET size_bytes = -1 WHERE size_bytes IS NULL; -UPDATE bitstream SET deleted = '0' WHERE deleted IS NULL; -UPDATE bitstream SET store_number = -1 WHERE store_number IS NULL; - --- Migrate EPersonGroup2EPerson table -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE EPersonGroup2EPerson ADD eperson_group_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE EPersonGroup2EPerson ADD eperson_id RAW(16) REFERENCES Eperson(uuid); -CREATE INDEX EpersonGroup2Eperson_group on EpersonGroup2Eperson(eperson_group_id); -CREATE INDEX EpersonGroup2Eperson_person on EpersonGroup2Eperson(eperson_id); -UPDATE EPersonGroup2EPerson SET eperson_group_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE EPersonGroup2EPerson.eperson_group_legacy_id = EPersonGroup.eperson_group_id); -UPDATE EPersonGroup2EPerson SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE EPersonGroup2EPerson.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_group_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_legacy_id; -ALTER TABLE epersongroup2eperson DROP COLUMN id; -ALTER TABLE EPersonGroup2EPerson add CONSTRAINT EPersonGroup2EPerson_unique primary key (eperson_group_id,eperson_id); - --- Migrate GROUP2GROUP table -ALTER TABLE Group2Group RENAME COLUMN parent_id to parent_legacy_id; -ALTER TABLE Group2Group RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2Group ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2Group ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2Group_parent on Group2Group(parent_id); -CREATE INDEX Group2Group_child on Group2Group(child_id); -UPDATE Group2Group SET parent_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE Group2Group.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2Group SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2Group.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2Group MODIFY parent_id NOT NULL; -ALTER TABLE Group2Group MODIFY child_id NOT NULL; -ALTER TABLE Group2Group DROP COLUMN parent_legacy_id; -ALTER TABLE Group2Group DROP COLUMN child_legacy_id; -ALTER TABLE Group2Group DROP COLUMN id; -ALTER TABLE Group2Group add CONSTRAINT Group2Group_unique primary key (parent_id,child_id); - --- Migrate collection2item -ALTER TABLE Collection2Item RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Collection2Item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE Collection2Item ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE Collection2Item ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX Collecion2Item_collection on Collection2Item(collection_id); -CREATE INDEX Collecion2Item_item on Collection2Item(item_id); -UPDATE Collection2Item SET collection_id = 
(SELECT Collection.uuid FROM Collection WHERE Collection2Item.collection_legacy_id = Collection.collection_id); -UPDATE Collection2Item SET item_id = (SELECT Item.uuid FROM Item WHERE Collection2Item.item_legacy_id = Item.item_id); -ALTER TABLE Collection2Item MODIFY collection_id NOT NULL; -ALTER TABLE Collection2Item MODIFY item_id NOT NULL; -ALTER TABLE Collection2Item DROP COLUMN collection_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN item_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN id; --- Magic query that will delete all duplicate collection item_id references from the database (if we don't do this the primary key creation will fail) -DELETE FROM collection2item WHERE rowid NOT IN (SELECT MIN(rowid) FROM collection2item GROUP BY collection_id,item_id); -ALTER TABLE Collection2Item add CONSTRAINT collection2item_unique primary key (collection_id,item_id); - --- Migrate Community2Community -ALTER TABLE Community2Community RENAME COLUMN parent_comm_id to parent_legacy_id; -ALTER TABLE Community2Community RENAME COLUMN child_comm_id to child_legacy_id; -ALTER TABLE Community2Community ADD parent_comm_id RAW(16) REFERENCES Community(uuid); -ALTER TABLE Community2Community ADD child_comm_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX Community2Community_parent on Community2Community(parent_comm_id); -CREATE INDEX Community2Community_child on Community2Community(child_comm_id); -UPDATE Community2Community SET parent_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.parent_legacy_id = Community.community_id); -UPDATE Community2Community SET child_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.child_legacy_id = Community.community_id); -ALTER TABLE Community2Community MODIFY parent_comm_id NOT NULL; -ALTER TABLE Community2Community MODIFY child_comm_id NOT NULL; -ALTER TABLE Community2Community DROP COLUMN parent_legacy_id; -ALTER TABLE Community2Community DROP COLUMN child_legacy_id; -ALTER TABLE Community2Community DROP COLUMN id; -ALTER TABLE Community2Community add CONSTRAINT Community2Community_unique primary key (parent_comm_id,child_comm_id); - --- Migrate community2collection -ALTER TABLE community2collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE community2collection RENAME COLUMN community_id to community_legacy_id; -ALTER TABLE community2collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE community2collection ADD community_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX community2collection_collectio on community2collection(collection_id); -CREATE INDEX community2collection_community on community2collection(community_id); -UPDATE community2collection SET collection_id = (SELECT Collection.uuid FROM Collection WHERE community2collection.collection_legacy_id = Collection.collection_id); -UPDATE community2collection SET community_id = (SELECT Community.uuid FROM Community WHERE community2collection.community_legacy_id = Community.community_id); -ALTER TABLE community2collection MODIFY collection_id NOT NULL; -ALTER TABLE community2collection MODIFY community_id NOT NULL; -ALTER TABLE community2collection DROP COLUMN collection_legacy_id; -ALTER TABLE community2collection DROP COLUMN community_legacy_id; -ALTER TABLE community2collection DROP COLUMN id; -ALTER TABLE community2collection add CONSTRAINT community2collection_unique primary key (collection_id,community_id); - - --- Migrate Group2GroupCache table -ALTER TABLE Group2GroupCache RENAME COLUMN 
parent_id to parent_legacy_id; -ALTER TABLE Group2GroupCache RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2GroupCache ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2GroupCache ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2GroupCache_parent on Group2GroupCache(parent_id); -CREATE INDEX Group2GroupCache_child on Group2GroupCache(child_id); -UPDATE Group2GroupCache SET parent_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2GroupCache SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2GroupCache MODIFY parent_id NOT NULL; -ALTER TABLE Group2GroupCache MODIFY child_id NOT NULL; -ALTER TABLE Group2GroupCache DROP COLUMN parent_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN child_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN id; -ALTER TABLE Group2GroupCache add CONSTRAINT Group2GroupCache_unique primary key (parent_id,child_id); - --- Migrate Item2Bundle -ALTER TABLE item2bundle RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE item2bundle RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE item2bundle ADD bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE item2bundle ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX item2bundle_bundle on item2bundle(bundle_id); -CREATE INDEX item2bundle_item on item2bundle(item_id); -UPDATE item2bundle SET bundle_id = (SELECT Bundle.uuid FROM Bundle WHERE item2bundle.bundle_legacy_id = Bundle.bundle_id); -UPDATE item2bundle SET item_id = (SELECT Item.uuid FROM Item WHERE item2bundle.item_legacy_id = Item.item_id); -ALTER TABLE item2bundle MODIFY bundle_id NOT NULL; -ALTER TABLE item2bundle MODIFY item_id NOT NULL; -ALTER TABLE item2bundle DROP COLUMN bundle_legacy_id; -ALTER TABLE item2bundle DROP COLUMN item_legacy_id; -ALTER TABLE item2bundle DROP COLUMN id; -ALTER TABLE item2bundle add CONSTRAINT item2bundle_unique primary key (bundle_id,item_id); - ---Migrate Bundle2Bitsteam -ALTER TABLE bundle2bitstream RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE bundle2bitstream ADD bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE bundle2bitstream ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle2bitstream_bundle on bundle2bitstream(bundle_id); -CREATE INDEX bundle2bitstream_bitstream on bundle2bitstream(bitstream_id); -UPDATE bundle2bitstream SET bundle_id = (SELECT bundle.uuid FROM bundle WHERE bundle2bitstream.bundle_legacy_id = bundle.bundle_id); -UPDATE bundle2bitstream SET bitstream_id = (SELECT bitstream.uuid FROM bitstream WHERE bundle2bitstream.bitstream_legacy_id = bitstream.bitstream_id); -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_order to bitstream_order_legacy; -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; -MERGE INTO bundle2bitstream dst -USING ( SELECT ROWID AS r_id - , ROW_NUMBER () OVER ( PARTITION BY bundle_id - ORDER BY bitstream_order_legacy, bitstream_id - ) AS new_order - FROM bundle2bitstream - ) src -ON (dst.ROWID = src.r_id) -WHEN MATCHED THEN UPDATE -SET dst.bitstream_order = (src.new_order-1) -; -ALTER TABLE bundle2bitstream MODIFY bundle_id NOT NULL; -ALTER TABLE bundle2bitstream MODIFY bitstream_id NOT NULL; -ALTER TABLE bundle2bitstream DROP COLUMN bundle_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN 
bitstream_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN id; -ALTER TABLE bundle2bitstream add CONSTRAINT bundle2bitstream_unique primary key (bitstream_id,bundle_id,bitstream_order); - - --- Migrate item -ALTER TABLE item RENAME COLUMN submitter_id to submitter_id_legacy_id; -ALTER TABLE item ADD submitter_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX item_submitter on item(submitter_id); -UPDATE item SET submitter_id = (SELECT eperson.uuid FROM eperson WHERE item.submitter_id_legacy_id = eperson.eperson_id); -ALTER TABLE item DROP COLUMN submitter_id_legacy_id; - -ALTER TABLE item RENAME COLUMN owning_collection to owning_collection_legacy; -ALTER TABLE item ADD owning_collection RAW(16) REFERENCES Collection(uuid); -CREATE INDEX item_collection on item(owning_collection); -UPDATE item SET owning_collection = (SELECT Collection.uuid FROM Collection WHERE item.owning_collection_legacy = collection.collection_id); -ALTER TABLE item DROP COLUMN owning_collection_legacy; - -UPDATE item SET in_archive = '0' WHERE in_archive IS NULL; -UPDATE item SET discoverable = '0' WHERE discoverable IS NULL; -UPDATE item SET withdrawn = '0' WHERE withdrawn IS NULL; - --- Migrate bundle -ALTER TABLE bundle RENAME COLUMN primary_bitstream_id to primary_bitstream_legacy_id; -ALTER TABLE bundle ADD primary_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle_primary on bundle(primary_bitstream_id); -UPDATE bundle SET primary_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE bundle.primary_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE bundle DROP COLUMN primary_bitstream_legacy_id; - - --- Migrate community references -ALTER TABLE Community RENAME COLUMN admin to admin_legacy; -ALTER TABLE Community ADD admin RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX Community_admin on Community(admin); -UPDATE Community SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Community.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Community DROP COLUMN admin_legacy; - -ALTER TABLE Community RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Community ADD logo_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX Community_bitstream on Community(logo_bitstream_id); -UPDATE Community SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Community.logo_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE Community DROP COLUMN logo_bitstream_legacy_id; - - ---Migrate Collection references -ALTER TABLE Collection RENAME COLUMN workflow_step_1 to workflow_step_1_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_2 to workflow_step_2_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_3 to workflow_step_3_legacy; -ALTER TABLE Collection RENAME COLUMN submitter to submitter_legacy; -ALTER TABLE Collection RENAME COLUMN template_item_id to template_item_legacy_id; -ALTER TABLE Collection RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Collection RENAME COLUMN admin to admin_legacy; -ALTER TABLE Collection ADD workflow_step_1 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_2 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_3 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD submitter RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD template_item_id RAW(16); -ALTER TABLE Collection ADD logo_bitstream_id RAW(16); -ALTER TABLE Collection ADD admin RAW(16) REFERENCES 
EPersonGroup(uuid); -CREATE INDEX Collection_workflow1 on Collection(workflow_step_1); -CREATE INDEX Collection_workflow2 on Collection(workflow_step_2); -CREATE INDEX Collection_workflow3 on Collection(workflow_step_3); -CREATE INDEX Collection_submitter on Collection(submitter); -CREATE INDEX Collection_template on Collection(template_item_id); -CREATE INDEX Collection_bitstream on Collection(logo_bitstream_id); -UPDATE Collection SET workflow_step_1 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_1_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_2 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_2_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_3 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_3_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET submitter = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.submitter_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET template_item_id = (SELECT Item.uuid FROM Item WHERE Collection.template_item_legacy_id = Item.item_id); -UPDATE Collection SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Collection.logo_bitstream_legacy_id = Bitstream.bitstream_id); -UPDATE Collection SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Collection DROP COLUMN workflow_step_1_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_2_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_3_legacy; -ALTER TABLE Collection DROP COLUMN submitter_legacy; -ALTER TABLE Collection DROP COLUMN template_item_legacy_id; -ALTER TABLE Collection DROP COLUMN logo_bitstream_legacy_id; -ALTER TABLE Collection DROP COLUMN admin_legacy; - - --- Migrate resource policy references -ALTER TABLE ResourcePolicy RENAME COLUMN eperson_id to eperson_id_legacy_id; -ALTER TABLE ResourcePolicy ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX resourcepolicy_person on resourcepolicy(eperson_id); -UPDATE ResourcePolicy SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.eperson_id_legacy_id = eperson.eperson_id); -ALTER TABLE ResourcePolicy DROP COLUMN eperson_id_legacy_id; - -ALTER TABLE ResourcePolicy RENAME COLUMN epersongroup_id to epersongroup_id_legacy_id; -ALTER TABLE ResourcePolicy ADD epersongroup_id RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX resourcepolicy_group on resourcepolicy(epersongroup_id); -UPDATE ResourcePolicy SET epersongroup_id = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.epersongroup_id_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE ResourcePolicy DROP COLUMN epersongroup_id_legacy_id; - -ALTER TABLE ResourcePolicy ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -UPDATE ResourcePolicy SET dspace_object = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.resource_id = eperson.eperson_id AND ResourcePolicy.resource_type_id = 7) WHERE ResourcePolicy.resource_type_id = 7; -UPDATE ResourcePolicy SET dspace_object = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.resource_id = epersongroup.eperson_group_id AND ResourcePolicy.resource_type_id = 6) WHERE ResourcePolicy.resource_type_id = 6; -UPDATE ResourcePolicy SET dspace_object = (SELECT community.uuid FROM community WHERE ResourcePolicy.resource_id = 
community.community_id AND ResourcePolicy.resource_type_id = 4) WHERE ResourcePolicy.resource_type_id = 4; -UPDATE ResourcePolicy SET dspace_object = (SELECT collection.uuid FROM collection WHERE ResourcePolicy.resource_id = collection.collection_id AND ResourcePolicy.resource_type_id = 3) WHERE ResourcePolicy.resource_type_id = 3; -UPDATE ResourcePolicy SET dspace_object = (SELECT item.uuid FROM item WHERE ResourcePolicy.resource_id = item.item_id AND ResourcePolicy.resource_type_id = 2) WHERE ResourcePolicy.resource_type_id = 2; -UPDATE ResourcePolicy SET dspace_object = (SELECT bundle.uuid FROM bundle WHERE ResourcePolicy.resource_id = bundle.bundle_id AND ResourcePolicy.resource_type_id = 1) WHERE ResourcePolicy.resource_type_id = 1; -UPDATE ResourcePolicy SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE ResourcePolicy.resource_id = bitstream.bitstream_id AND ResourcePolicy.resource_type_id = 0) WHERE ResourcePolicy.resource_type_id = 0; -UPDATE resourcepolicy SET resource_type_id = -1 WHERE resource_type_id IS NULL; -UPDATE resourcepolicy SET action_id = -1 WHERE action_id IS NULL; - - --- Migrate Subscription -ALTER TABLE Subscription RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE Subscription ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX Subscription_person on Subscription(eperson_id); -UPDATE Subscription SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE Subscription.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE Subscription DROP COLUMN eperson_legacy_id; - -ALTER TABLE Subscription RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Subscription ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX Subscription_collection on Subscription(collection_id); -UPDATE Subscription SET collection_id = (SELECT collection.uuid FROM collection WHERE Subscription.collection_legacy_id = collection.collection_id); -ALTER TABLE Subscription DROP COLUMN collection_legacy_id; - - --- Migrate versionitem -ALTER TABLE versionitem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE versionitem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX versionitem_person on versionitem(eperson_id); -UPDATE versionitem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE versionitem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE versionitem DROP COLUMN eperson_legacy_id; - -ALTER TABLE versionitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE versionitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX versionitem_item on versionitem(item_id); -UPDATE versionitem SET item_id = (SELECT item.uuid FROM item WHERE versionitem.item_legacy_id = item.item_id); -ALTER TABLE versionitem DROP COLUMN item_legacy_id; -UPDATE versionitem SET version_number = -1 WHERE version_number IS NULL; - --- Migrate handle table -ALTER TABLE handle RENAME COLUMN resource_id to resource_legacy_id; -ALTER TABLE handle ADD resource_id RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX handle_object on handle(resource_id); -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4); -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3); -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2); - --- Migrate 
metadata value table -DROP VIEW dcvalue; - -ALTER TABLE metadatavalue ADD dspace_object_id RAW(16) REFERENCES dspaceobject(uuid); --- CREATE INDEX metadatavalue_field on metadatavalue(metadata_field_id); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); -UPDATE metadatavalue SET dspace_object_id = (SELECT eperson.uuid FROM eperson WHERE metadatavalue.resource_id = eperson.eperson_id AND metadatavalue.resource_type_id = 7) WHERE metadatavalue.resource_type_id= 7; -UPDATE metadatavalue SET dspace_object_id = (SELECT epersongroup.uuid FROM epersongroup WHERE metadatavalue.resource_id = epersongroup.eperson_group_id AND metadatavalue.resource_type_id = 6) WHERE metadatavalue.resource_type_id= 6; -UPDATE metadatavalue SET dspace_object_id = (SELECT community.uuid FROM community WHERE metadatavalue.resource_id = community.community_id AND metadatavalue.resource_type_id = 4) WHERE metadatavalue.resource_type_id= 4; -UPDATE metadatavalue SET dspace_object_id = (SELECT collection.uuid FROM collection WHERE metadatavalue.resource_id = collection.collection_id AND metadatavalue.resource_type_id = 3) WHERE metadatavalue.resource_type_id= 3; -UPDATE metadatavalue SET dspace_object_id = (SELECT item.uuid FROM item WHERE metadatavalue.resource_id = item.item_id AND metadatavalue.resource_type_id = 2) WHERE metadatavalue.resource_type_id= 2; -UPDATE metadatavalue SET dspace_object_id = (SELECT bundle.uuid FROM bundle WHERE metadatavalue.resource_id = bundle.bundle_id AND metadatavalue.resource_type_id = 1) WHERE metadatavalue.resource_type_id= 1; -UPDATE metadatavalue SET dspace_object_id = (SELECT bitstream.uuid FROM bitstream WHERE metadatavalue.resource_id = bitstream.bitstream_id AND metadatavalue.resource_type_id = 0) WHERE metadatavalue.resource_type_id= 0; -DROP INDEX metadatavalue_item_idx; -DROP INDEX metadatavalue_item_idx2; -ALTER TABLE metadatavalue DROP COLUMN resource_id; -ALTER TABLE metadatavalue DROP COLUMN resource_type_id; -UPDATE MetadataValue SET confidence = -1 WHERE confidence IS NULL; -UPDATE metadatavalue SET place = -1 WHERE place IS NULL; - --- Alter harvested item -ALTER TABLE harvested_item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE harvested_item ADD item_id RAW(16) REFERENCES item(uuid); -CREATE INDEX harvested_item_item on harvested_item(item_id); -UPDATE harvested_item SET item_id = (SELECT item.uuid FROM item WHERE harvested_item.item_legacy_id = item.item_id); -ALTER TABLE harvested_item DROP COLUMN item_legacy_id; - --- Alter harvested collection -ALTER TABLE harvested_collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE harvested_collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX harvested_collection_collectio on harvested_collection(collection_id); -UPDATE harvested_collection SET collection_id = (SELECT collection.uuid FROM collection WHERE harvested_collection.collection_legacy_id = collection.collection_id); -ALTER TABLE harvested_collection DROP COLUMN collection_legacy_id; - -UPDATE harvested_collection SET harvest_type = -1 WHERE harvest_type IS NULL; -UPDATE harvested_collection SET harvest_status = -1 WHERE harvest_status IS NULL; - - ---Alter workspaceitem -ALTER TABLE workspaceitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workspaceitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX workspaceitem_item on workspaceitem(item_id); -UPDATE workspaceitem SET 
item_id = (SELECT item.uuid FROM item WHERE workspaceitem.item_legacy_id = item.item_id); -ALTER TABLE workspaceitem DROP COLUMN item_legacy_id; - -ALTER TABLE workspaceitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workspaceitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX workspaceitem_coll on workspaceitem(collection_id); -UPDATE workspaceitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workspaceitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workspaceitem DROP COLUMN collection_legacy_id; - -UPDATE workspaceitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workspaceitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workspaceitem SET multiple_files = '0' WHERE multiple_files IS NULL; -UPDATE workspaceitem SET stage_reached = -1 WHERE stage_reached IS NULL; -UPDATE workspaceitem SET page_reached = -1 WHERE page_reached IS NULL; - ---Alter epersongroup2workspaceitem -ALTER TABLE epersongroup2workspaceitem RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE epersongroup2workspaceitem ADD eperson_group_id RAW(16) REFERENCES epersongroup(uuid); -CREATE INDEX epersongroup2workspaceitem_gro on epersongroup2workspaceitem(eperson_group_id); -UPDATE epersongroup2workspaceitem SET eperson_group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE epersongroup2workspaceitem.eperson_group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE epersongroup2workspaceitem DROP COLUMN eperson_group_legacy_id; - -ALTER TABLE epersongroup2workspaceitem DROP COLUMN id; -ALTER TABLE epersongroup2workspaceitem MODIFY workspace_item_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem MODIFY eperson_group_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem add CONSTRAINT epersongroup2wsitem_unqiue primary key (workspace_item_id,eperson_group_id); - ---Alter most_recent_checksum -ALTER TABLE most_recent_checksum RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE most_recent_checksum ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX most_recent_checksum_bitstream on most_recent_checksum(bitstream_id); -UPDATE most_recent_checksum SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE most_recent_checksum.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE most_recent_checksum DROP COLUMN bitstream_legacy_id; - -UPDATE most_recent_checksum SET to_be_processed = '0' WHERE to_be_processed IS NULL; -UPDATE most_recent_checksum SET matched_prev_checksum = '0' WHERE matched_prev_checksum IS NULL; - ---Alter checksum_history -ALTER TABLE checksum_history RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE checksum_history ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX checksum_history_bitstream on checksum_history(bitstream_id); -UPDATE checksum_history SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE checksum_history.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE checksum_history DROP COLUMN bitstream_legacy_id; - -RENAME checksum_history_seq TO checksum_history_check_id_seq; - ---Alter table doi -ALTER TABLE doi ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX doi_object on doi(dspace_object); -UPDATE doi SET dspace_object = (SELECT community.uuid FROM community WHERE doi.resource_id = community.community_id AND doi.resource_type_id = 4) WHERE doi.resource_type_id = 4; -UPDATE doi SET dspace_object = (SELECT collection.uuid FROM 
collection WHERE doi.resource_id = collection.collection_id AND doi.resource_type_id = 3) WHERE doi.resource_type_id = 3; -UPDATE doi SET dspace_object = (SELECT item.uuid FROM item WHERE doi.resource_id = item.item_id AND doi.resource_type_id = 2) WHERE doi.resource_type_id = 2; -UPDATE doi SET dspace_object = (SELECT bundle.uuid FROM bundle WHERE doi.resource_id = bundle.bundle_id AND doi.resource_type_id = 1) WHERE doi.resource_type_id = 1; -UPDATE doi SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE doi.resource_id = bitstream.bitstream_id AND doi.resource_type_id = 0) WHERE doi.resource_type_id = 0; - ---Update table bitstreamformatregistry -UPDATE bitstreamformatregistry SET support_level = -1 WHERE support_level IS NULL; - ---Update table requestitem -UPDATE requestitem SET allfiles = '0' WHERE allfiles IS NULL; -UPDATE requestitem SET accept_request = '0' WHERE accept_request IS NULL; - ---Update table webapp -UPDATE webapp SET isui = -1 WHERE isui IS NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql deleted file mode 100644 index 8f1a7ad157a2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS_3378 Lost oracle indexes ------------------------------------------------------- -CREATE UNIQUE INDEX eperson_eperson on eperson(eperson_id); -CREATE UNIQUE INDEX epersongroup_eperson_group on epersongroup(eperson_group_id); -CREATE UNIQUE INDEX community_community on community(community_id); -CREATE UNIQUE INDEX collection_collection on collection(collection_id); -CREATE UNIQUE INDEX item_item on item(item_id); -CREATE UNIQUE INDEX bundle_bundle on bundle(bundle_id); -CREATE UNIQUE INDEX bitstream_bitstream on bitstream(bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql deleted file mode 100644 index 8ad6f7fcd247..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3024 Invent "permanent" groups ------------------------------------------------------- - -ALTER TABLE epersongroup - ADD (permanent NUMBER(1) DEFAULT 0); -UPDATE epersongroup SET permanent = 1 - WHERE uuid IN ( - SELECT dspace_object_id - FROM metadataschemaregistry s - JOIN metadatafieldregistry f USING (metadata_schema_id) - JOIN metadatavalue v USING (metadata_field_id) - WHERE s.short_id = 'dc' - AND f.element = 'title' - AND f.qualifier IS NULL - AND dbms_lob.compare(v.text_value, 'Administrator') = 0 OR 
dbms_lob.compare(v.text_value,'Anonymous') = 0 - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql deleted file mode 100644 index 18cb4a50841d..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3024 extremely slow searching when logged in as admin ---------------------------------------------------------------- --- This script will put the group name on the epersongroup --- record itself for performance reasons. It will also make --- sure that a group name is unique (so that for example no two --- Administrator groups can be created). ---------------------------------------------------------------- - -ALTER TABLE epersongroup -ADD name VARCHAR2(250); - -CREATE UNIQUE INDEX epersongroup_unique_idx_name on epersongroup(name); - -UPDATE epersongroup -SET name = -(SELECT text_value - FROM metadatavalue v - JOIN metadatafieldregistry field on v.metadata_field_id = field.metadata_field_id - JOIN metadataschemaregistry s ON field.metadata_schema_id = s.metadata_schema_id - WHERE s.short_id = 'dc' AND element = 'title' AND qualifier IS NULL - AND v.dspace_object_id = epersongroup.uuid); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql deleted file mode 100644 index e0a103749c2b..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1955 resize rpdescription for embargo reason ------------------------------------------------------- - --- We cannot alter type between varchar2 & clob directly so an in between column is required -ALTER TABLE resourcepolicy ADD rpdescription_clob CLOB; -UPDATE resourcepolicy SET rpdescription_clob=rpdescription, rpdescription=null; -ALTER TABLE resourcepolicy DROP COLUMN rpdescription; -ALTER TABLE resourcepolicy RENAME COLUMN rpdescription_clob TO rpdescription; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql deleted file mode 100644 index 7b13d10b6d4f..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql +++ /dev/null @@ -1,46 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3086 OAI Harvesting performance ---------------------------------------------------------------- --- This script will create indexes on the key fields of the --- metadataschemaregistry and metadatafieldregistry tables to --- increase the performance of the queries. It will also add --- "ON DELETE CASCADE" to improve the performance of Item deletion. 
---------------------------------------------------------------- - -CREATE UNIQUE INDEX metadataschema_idx_short_id on metadataschemaregistry(short_id); - -CREATE INDEX metadatafield_idx_elem_qual on metadatafieldregistry(element, qualifier); - -CREATE INDEX resourcepolicy_idx_rptype on resourcepolicy(rptype); - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE RESOURCEPOLICY ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE RESOURCEPOLICY SET DSPACE_OBJECT_NEW = DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY DROP COLUMN DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT; - -ALTER TABLE RESOURCEPOLICY -ADD CONSTRAINT RESOURCEPOLICY_DSPACE_OBJ_FK -FOREIGN KEY (DSPACE_OBJECT) -REFERENCES dspaceobject(uuid) -ON DELETE CASCADE; - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE METADATAVALUE ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE METADATAVALUE SET DSPACE_OBJECT_NEW = DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE DROP COLUMN DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT_ID; - -ALTER TABLE METADATAVALUE -ADD CONSTRAINT METADATAVALUE_DSPACE_OBJECT_FK -FOREIGN KEY (DSPACE_OBJECT_ID) -REFERENCES DSPACEOBJECT(UUID) -ON DELETE CASCADE; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql deleted file mode 100644 index a1b303f0365a..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql +++ /dev/null @@ -1,33 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3125 Submitters cannot delete bistreams of workspaceitems ---------------------------------------------------------------- --- This script will add delete rights on all bundles/bitstreams --- for people who already have REMOVE rights. --- In previous versions REMOVE rights was enough to ensure that --- you could delete an object. 
---------------------------------------------------------------- -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname, -rptype, rpdescription, eperson_id, epersongroup_id, dspace_object) -SELECT -resourcepolicy_seq.nextval AS policy_id, -resource_type_id, -resource_id, --- Insert the Constants.DELETE action -2 AS action_id, -start_date, -end_date, -rpname, -rptype, -rpdescription, -eperson_id, -epersongroup_id, -dspace_object -FROM resourcepolicy WHERE action_id=4 AND (resource_type_id=0 OR resource_type_id=1 OR resource_type_id=2); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql deleted file mode 100644 index 2ba3517e1988..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3168 Embargo request Unknown Entity RequestItem ---------------------------------------------------------------- --- convert the item_id and bitstream_id columns from integer to UUID ---------------------------------------------------------------- -ALTER TABLE requestitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE requestitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX requestitem_item on requestitem(item_id); -UPDATE requestitem SET item_id = (SELECT item.uuid FROM item WHERE requestitem.item_legacy_id = item.item_id); -ALTER TABLE requestitem DROP COLUMN item_legacy_id; - -ALTER TABLE requestitem RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE requestitem ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX requestitem_bitstream on requestitem(bitstream_id); -UPDATE requestitem SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE requestitem.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE requestitem DROP COLUMN bitstream_legacy_id; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql deleted file mode 100644 index 74783974468c..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2775 Drop unused sequences ------------------------------------------------------- - -DROP SEQUENCE bitstream_seq; -DROP SEQUENCE bundle2bitstream_seq; -DROP SEQUENCE bundle_seq; -DROP SEQUENCE collection2item_seq; -DROP SEQUENCE collection_seq; -DROP SEQUENCE community2collection_seq; -DROP SEQUENCE community2community_seq; -DROP SEQUENCE community_seq; 
-DROP SEQUENCE dcvalue_seq; -DROP SEQUENCE eperson_seq; -DROP SEQUENCE epersongroup2eperson_seq; -DROP SEQUENCE epersongroup2workspaceitem_seq; -DROP SEQUENCE epersongroup_seq; -DROP SEQUENCE group2group_seq; -DROP SEQUENCE group2groupcache_seq; -DROP SEQUENCE historystate_seq; -DROP SEQUENCE item2bundle_seq; -DROP SEQUENCE item_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql deleted file mode 100644 index 96f125f78b61..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql +++ /dev/null @@ -1,44 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------------------------------------- --- DS-3277 : 'handle_id' column needs its own separate sequence, so that Handles --- can be minted from 'handle_seq' ----------------------------------------------------------------------------------- --- Create a new sequence for 'handle_id' column. --- The role of this sequence is to simply provide a unique internal ID to the database. -CREATE SEQUENCE handle_id_seq; --- Initialize new 'handle_id_seq' to the maximum value of 'handle_id' -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(handle_id) INTO curr FROM handle; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_id_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_id_seq START WITH ' || NVL(curr,1); -END; -/ - --- Ensure the 'handle_seq' is updated to the maximum *suffix* in 'handle' column, --- as this sequence is used to mint new Handles. 
--- Code borrowed from update-sequences.sql and updateseq.sql -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(to_number(regexp_replace(handle, '.*/', ''), '999999999999')) INTO curr FROM handle WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$'); - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_seq START WITH ' || NVL(curr,1); -END; -/ \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql deleted file mode 100644 index e1220c8c7cce..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql deleted file mode 100644 index 5c3c3842aaea..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3410 ---------------------------------------------------------------- --- This script will create lost indexes ---------------------------------------------------------------- - -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql deleted file mode 100644 index 47b2d18be8a3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql +++ /dev/null @@ -1,16 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root 
of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 Handle of collections and communities are lost due to bug at V6.0_2015.03.07__DS-2701_Hibernate_migration.sql ------------------------------------------------------- - -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4) where handle.resource_type_id = 4; -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3) where handle.resource_type_id = 3; -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2) where handle.resource_type_id = 2; - \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql deleted file mode 100644 index 30cfae91c83a..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------------------------------------------------- --- This adds an extra column to the eperson table where we save a salt for stateless authentication ------------------------------------------------------------------------------------------------------------- -ALTER TABLE eperson ADD session_salt varchar(32); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql deleted file mode 100644 index fc1c0b2e2319..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql +++ /dev/null @@ -1,65 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the dspace 7 entities usage -------------------------------------------------------------- -CREATE SEQUENCE entity_type_id_seq; -CREATE SEQUENCE relationship_type_id_seq; -CREATE SEQUENCE relationship_id_seq; - -CREATE TABLE entity_type -( - id INTEGER NOT NULL PRIMARY KEY, - label varchar(32) UNIQUE NOT NULL -); - -CREATE TABLE relationship_type -( - id INTEGER NOT NULL PRIMARY KEY, - left_type INTEGER NOT NULL, - right_type INTEGER NOT NULL, - left_label varchar(32) NOT NULL, - right_label varchar(32) NOT NULL, - left_min_cardinality INTEGER, - left_max_cardinality INTEGER, - right_min_cardinality INTEGER, - right_max_cardinality INTEGER, - FOREIGN KEY (left_type) REFERENCES entity_type(id), - FOREIGN KEY (right_type) REFERENCES entity_type(id), - CONSTRAINT u_relationship_type_constraint UNIQUE (left_type, right_type, left_label, right_label) - -); - -CREATE TABLE relationship -( - id INTEGER NOT NULL PRIMARY KEY, - left_id raw(16) NOT NULL REFERENCES item(uuid), - type_id INTEGER NOT NULL REFERENCES relationship_type(id), - right_id raw(16) NOT NULL REFERENCES item(uuid), - left_place INTEGER, - right_place INTEGER, - CONSTRAINT u_constraint UNIQUE (left_id, type_id, right_id) - -); - -CREATE INDEX entity_type_label_idx ON entity_type(label); -CREATE INDEX rl_ty_by_left_type_idx ON relationship_type(left_type); -CREATE INDEX rl_ty_by_right_type_idx ON relationship_type(right_type); -CREATE INDEX rl_ty_by_left_label_idx ON relationship_type(left_label); -CREATE INDEX rl_ty_by_right_label_idx ON relationship_type(right_label); -CREATE INDEX relationship_by_left_id_idx ON relationship(left_id); -CREATE INDEX relationship_by_right_id_idx ON relationship(right_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql deleted file mode 100644 index 68ed690f89e8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ----------------------------------------------------------------------------------------------------------------- --- This adds TYPE_INHERITED to all old archived items permission due to the change on resource policy management ----------------------------------------------------------------------------------------------------------------- -UPDATE resourcepolicy set rptype = 'TYPE_INHERITED' - where resource_type_id = 2 and rptype is null - and dspace_object in ( - select uuid from item where in_archive = 1 - ); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql deleted file mode 100644 index b23170f43732..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-4239 Migrate the workflow.xml to spring ---------------------------------------------------------------- --- This script will rename the default workflow "default" name --- to the new "defaultWorkflow" identifier ---------------------------------------------------------------- - -UPDATE cwf_pooltask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; -UPDATE cwf_claimtask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql deleted file mode 100644 index cebae09f651c..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns leftwardValue and rightwardValue in table relationship --- Rename columns left_label and right_label to leftward_type and rightward_type ------------------------------------------------------------------------------------ - -ALTER TABLE relationship ADD leftward_value VARCHAR2(50); -ALTER TABLE relationship ADD rightward_value VARCHAR2(50); - -ALTER TABLE relationship_type RENAME COLUMN left_label TO leftward_type; -ALTER TABLE relationship_type RENAME COLUMN right_label TO rightward_type; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql deleted file mode 100644 index a7015e3033bf..000000000000 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql +++ /dev/null @@ -1,40 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== -CREATE SEQUENCE process_id_seq; - -CREATE TABLE process -( - process_id INTEGER NOT NULL PRIMARY KEY, - user_id RAW(16) NOT NULL, - start_time TIMESTAMP, - finished_time TIMESTAMP, - creation_time TIMESTAMP NOT NULL, - script VARCHAR(256) NOT NULL, - status VARCHAR(32), - parameters VARCHAR(512) -); - -CREATE TABLE process2bitstream -( - process_id INTEGER REFERENCES process(process_id), - bitstream_id RAW(16) REFERENCES bitstream(uuid), - CONSTRAINT PK_process2bitstream PRIMARY KEY (process_id, bitstream_id) -); - -CREATE INDEX process_user_id_idx ON process(user_id); -CREATE INDEX process_status_idx ON process(status); -CREATE INDEX process_name_idx on process(script); -CREATE INDEX process_start_time_idx on process(start_time); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql deleted file mode 100644 index a108fd74b468..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the IRUS statistics harvester -------------------------------------------------------------- - -CREATE SEQUENCE openurltracker_seq; - -CREATE TABLE openurltracker -( - tracker_id NUMBER, - tracker_url VARCHAR2(1000), - uploaddate DATE, - CONSTRAINT openurltracker_PK PRIMARY KEY (tracker_id) -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql deleted file mode 100644 index 9c39091f89dc..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql +++ /dev/null @@ -1,56 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- --- Move all 'relationship.type' metadata fields to 'dspace.entity.type'. Remove 'relationship' schema. -------------------------------------------------------------------------------------------------------- --- Special case: we need to the 'dspace' schema to already exist. If users don't already have it we must create it --- manually via SQL, as by default it won't be created until database updates are finished. 
-INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) - SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/dspace' as namespace, 'dspace' as short_id FROM dual - WHERE NOT EXISTS - (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry - WHERE namespace = 'http://dspace.org/dspace' AND short_id = 'dspace'); - - --- Add 'dspace.entity.type' field to registry (if missing) -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace'), 'entity', 'type' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entitye' AND qualifier='type'); - --- Moves all 'relationship.type' field values to a new 'dspace.entity.type' field -UPDATE metadatavalue - SET metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entity' AND qualifier='type') - WHERE metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='relationship') - AND element = 'type' AND qualifier is NULL); - - --- Delete 'relationship.type' field from registry -DELETE FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id = 'relationship') - AND element = 'type' AND qualifier is NULL; - --- Delete 'relationship' schema (which is now empty) -DELETE FROM metadataschemaregistry WHERE short_id = 'relationship' AND namespace = 'http://dspace.org/relationship'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql deleted file mode 100644 index 5a6abda04101..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql +++ /dev/null @@ -1,28 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------------------------- -UPDATE metadatavalue SET dspace_object_id = (SELECT uuid - FROM collection - WHERE template_item_id = dspace_object_id) -WHERE dspace_object_id IN (SELECT template_item_id - FROM Collection) - AND metadata_field_id - IN (SELECT metadata_field_id - FROM metadatafieldregistry mfr LEFT JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE msr.short_id = 'dspace' AND mfr.element = 'entity' AND mfr.qualifier = 'type'); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql deleted file mode 100644 index ae8f1e7ef5d2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql +++ /dev/null @@ -1,15 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------------- ----- ALTER table collection -------------------------------------------------------------------------------------- - -ALTER TABLE collection DROP COLUMN workflow_step_1; -ALTER TABLE collection DROP COLUMN workflow_step_2; -ALTER TABLE collection DROP COLUMN workflow_step_3; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql deleted file mode 100644 index 9c39c15e66e2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Make sure the metadatavalue.place column starts at 0 instead of 1 ----------------------------------------------------- -MERGE INTO metadatavalue mdv -USING ( - SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace - FROM metadatavalue - GROUP BY dspace_object_id, metadata_field_id -) mp -ON ( - mdv.dspace_object_id = mp.dspace_object_id - AND mdv.metadata_field_id = mp.metadata_field_id - AND mp.minplace > 0 -) -WHEN MATCHED THEN UPDATE -SET mdv.place = mdv.place - mp.minplace; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql deleted file mode 
100644 index 14bf8531439f..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create table for ORCID access tokens ------------------------------------------------------------------------------------ - -CREATE SEQUENCE orcid_token_id_seq; - -CREATE TABLE orcid_token -( - id INTEGER NOT NULL, - eperson_id RAW(16) NOT NULL UNIQUE, - profile_item_id RAW(16), - access_token VARCHAR2(100) NOT NULL, - CONSTRAINT orcid_token_pkey PRIMARY KEY (id), - CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), - CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) -); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql @@ -0,0 +1 @@ + diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql deleted file mode 100644 index 3eb9ae6dd4f8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql +++ /dev/null @@ -1,10 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) -ALTER TABLE relationship ADD latest_version_status INTEGER DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql deleted file mode 100644 index 3862830230e3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql +++ /dev/null @@ -1,45 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- ADD table subscription_parameter ------------------------------------------------------------------------------------ - - -CREATE 
SEQUENCE if NOT EXISTS subscription_parameter_seq; ------------------------------------------------------------------------------------ --- ADD table subscription_parameter ------------------------------------------------------------------------------------ -CREATE TABLE if NOT EXISTS subscription_parameter -( - subscription_parameter_id INTEGER NOT NULL, - name VARCHAR(255), - value VARCHAR(255), - subscription_id INTEGER NOT NULL, - CONSTRAINT subscription_parameter_pkey PRIMARY KEY (subscription_parameter_id), - CONSTRAINT subscription_parameter_subscription_fkey FOREIGN KEY (subscription_id) - REFERENCES subscription (subscription_id) ON DELETE CASCADE -); --- -- - -ALTER TABLE subscription ADD COLUMN if NOT EXISTS dspace_object_id UUID; ----- -- -ALTER TABLE subscription ADD COLUMN if NOT EXISTS type CHARACTER VARYING(255); --- -UPDATE subscription SET dspace_object_id = collection_id , type = 'content'; --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_dspaceobject_fkey; -ALTER TABLE subscription ADD CONSTRAINT subscription_dspaceobject_fkey FOREIGN KEY (dspace_object_id) REFERENCES dspaceobject (uuid); --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_collection_id_fkey; ----- -- -ALTER TABLE subscription DROP COLUMN IF EXISTS collection_id; --- -- -INSERT INTO subscription_parameter (subscription_parameter_id, name, value, subscription_id) -SELECT getnextid('subscription_parameter'), 'frequency', 'D', subscription_id from "subscription" ; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql deleted file mode 100644 index c7bb0b502ec2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql +++ /dev/null @@ -1,78 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------- --- Table to store supervision orders -------------------------------------------------------------------------------- - -CREATE TABLE supervision_orders -( - id INTEGER PRIMARY KEY, - item_id UUID REFERENCES Item(uuid) ON DELETE CASCADE, - eperson_group_id UUID REFERENCES epersongroup(uuid) ON DELETE CASCADE -); - -CREATE SEQUENCE supervision_orders_seq; - -INSERT INTO supervision_orders (id, item_id, eperson_group_id) -SELECT supervision_orders_seq.nextval AS id, w.item_id, e.uuid -FROM epersongroup2workspaceitem ew INNER JOIN workspaceitem w -ON ew.workspace_item_id = w.workspace_item_id -INNER JOIN epersongroup e -ON ew.eperson_group_id = e.uuid; - - --- UPDATE policies for supervision orders --- items, bundles and bitstreams - -DECLARE -BEGIN - -FOR rec IN -( -SELECT so.item_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN RESOURCEPOLICY rp on so.item_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT ib.bundle_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN RESOURCEPOLICY rp on ib.bundle_id = rp.dspace_object -AND 
so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT bs.bitstream_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN bundle2bitstream bs ON ib.bundle_id = bs.bundle_id -INNER JOIN RESOURCEPOLICY rp on bs.bitstream_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL -) - -LOOP - -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_SUBMISSION' -where dspace_object = rec.dspace_object -AND epersongroup_id = rec.eperson_group_id -AND rptype IS NULL; - -END LOOP; -END; - -------------------------------------------------------------------------------- --- drop epersongroup2workspaceitem table -------------------------------------------------------------------------------- - -DROP TABLE epersongroup2workspaceitem; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..6b2dd705ea68 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process MODIFY (parameters CLOB); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql deleted file mode 100644 index b4d4d755cbe7..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql +++ /dev/null @@ -1,77 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- SQL code to update the ID (primary key) generating sequences, if some --- import operation has set explicit IDs. --- --- Sequences are used to generate IDs for new rows in the database. If a --- bulk import operation, such as an SQL dump, specifies primary keys for --- imported data explicitly, the sequences are out of sync and need updating. --- This SQL code does just that. --- --- This should rarely be needed; any bulk import should be performed using the --- org.dspace.content API which is safe to use concurrently and in multiple --- JVMs. The SQL code below will typically only be required after a direct --- SQL data dump from a backup or somesuch. 
- --- The 'updateseq' procedure was derived from incseq.sql found at: --- http://www.akadia.com/services/scripts/incseq.sql - -DECLARE - PROCEDURE updateseq ( seq IN VARCHAR, - tbl IN VARCHAR, - attr IN VARCHAR, - cond IN VARCHAR DEFAULT '' ) IS - curr NUMBER := 0; - BEGIN - EXECUTE IMMEDIATE 'SELECT max(' || attr - || ') FROM ' || tbl - || ' ' || cond - INTO curr; - curr := curr + 1; - EXECUTE IMMEDIATE 'DROP SEQUENCE ' || seq; - EXECUTE IMMEDIATE 'CREATE SEQUENCE ' - || seq - || ' START WITH ' - || NVL(curr, 1); - END updateseq; - -BEGIN - updateseq('bitstreamformatregistry_seq', 'bitstreamformatregistry', - 'bitstream_format_id'); - updateseq('fileextension_seq', 'fileextension', 'file_extension_id'); - updateseq('resourcepolicy_seq', 'resourcepolicy', 'policy_id'); - updateseq('workspaceitem_seq', 'workspaceitem', 'workspace_item_id'); - updateseq('registrationdata_seq', 'registrationdata', - 'registrationdata_id'); - updateseq('subscription_seq', 'subscription', 'subscription_id'); - updateseq('metadatafieldregistry_seq', 'metadatafieldregistry', - 'metadata_field_id'); - updateseq('metadatavalue_seq', 'metadatavalue', 'metadata_value_id'); - updateseq('metadataschemaregistry_seq', 'metadataschemaregistry', - 'metadata_schema_id'); - updateseq('harvested_collection_seq', 'harvested_collection', 'id'); - updateseq('harvested_item_seq', 'harvested_item', 'id'); - updateseq('webapp_seq', 'webapp', 'webapp_id'); - updateseq('requestitem_seq', 'requestitem', 'requestitem_id'); - updateseq('handle_id_seq', 'handle', 'handle_id'); - - -- Handle Sequence is a special case. Since Handles minted by DSpace - -- use the 'handle_seq', we need to ensure the next assigned handle - -- will *always* be unique. So, 'handle_seq' always needs to be set - -- to the value of the *largest* handle suffix. That way when the - -- next handle is assigned, it will use the next largest number. This - -- query does the following: - -- For all 'handle' values which have a number in their suffix - -- (after '/'), find the maximum suffix value, convert it to a - -- number, and set the 'handle_seq' to start at the next value (see - -- updateseq above for more). - updateseq('handle_seq', 'handle', - q'{to_number(regexp_replace(handle, '.*/', ''), '999999999999')}', - q'{WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$')}'); -END; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md index 72eb279912b5..e16e4c6d4c91 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md @@ -3,8 +3,9 @@ The SQL scripts in this directory are PostgreSQL-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. 
@@ -22,7 +23,7 @@ Please see the Flyway Documentation for more information: http://flywaydb.org/
 The `update-sequences.sql` script in this directory may still be used to update
 your internal database counts if you feel they have gotten out of "sync". This
 may sometimes occur after large restores of content (e.g. when using the DSpace
-[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore)
+[AIP Backup and Restore](https://wiki.lyrasis.org/display/DSDOC7x/AIP+Backup+and+Restore)
 feature).
 
 This `update-sequences.sql` script can be executed by running
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql
similarity index 65%
rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql
rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql
index f71173abe607..e4544e1de729 100644
--- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql
+++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql
@@ -7,11 +7,7 @@
 --
 
 -----------------------------------------------------------------------------------
--- Drop the 'workflowitem' and 'tasklistitem' tables
+-- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5)
 -----------------------------------------------------------------------------------
 
-DROP TABLE workflowitem CASCADE CONSTRAINTS;
-DROP TABLE tasklistitem CASCADE CONSTRAINTS;
-
-DROP SEQUENCE workflowitem_seq;
-DROP SEQUENCE tasklistitem_seq;
\ No newline at end of file
+DROP SEQUENCE IF EXISTS history_seq;
\ No newline at end of file
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql
similarity index 57%
rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql
rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql
index 9d13138fdada..8aec44a7f6f2 100644
--- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql
+++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql
@@ -7,16 +7,11 @@
 --
 
 -----------------------------------------------------------------------------------
--- Create table for System wide alerts
+-- Update short description for PNG mimetype in the bitstream format registry
+-- See: https://github.com/DSpace/DSpace/pull/8722
 -----------------------------------------------------------------------------------
 
-CREATE SEQUENCE alert_id_seq;
-
-CREATE TABLE systemwidealert
-(
-    alert_id INTEGER NOT NULL PRIMARY KEY,
-    message VARCHAR(512),
-    allow_sessions VARCHAR(64),
-    countdown_to TIMESTAMP,
-    active BOOLEAN
-);
+UPDATE bitstreamformatregistry
+SET short_description='PNG'
+WHERE short_description='image/png'
+  AND mimetype='image/png';
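The README hunk above ends at the sentence introducing how `update-sequences.sql` is executed; the actual command lies outside this hunk's context. As a minimal sketch of what that script does (it reuses the `setval` pattern added to the PostgreSQL version later in this patch, and the example below assumes the table is non-empty), syncing a single sequence by hand looks like this:

    -- Re-point the sequence at the current maximum key so that the next
    -- generated ID cannot collide with rows imported with explicit IDs.
    SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue;
    -- A subsequent nextval('metadatavalue_seq') then returns max(metadata_value_id) + 1.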
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql new file mode 100644 index 000000000000..ae0e414e4440 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE orcid_history ALTER COLUMN description TYPE TEXT; +ALTER TABLE orcid_queue ALTER COLUMN description TYPE TEXT; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..f7e0e51d0bf7 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process ALTER COLUMN parameters TYPE TEXT; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql index 749f82382c9d..f96434f1ba8c 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql @@ -19,21 +19,41 @@ -- JVMs. The SQL code below will typically only be required after a direct -- SQL data dump from a backup or somesuch. 
- +SELECT setval('alert_id_seq', max(alert_id)) FROM systemwidealert; SELECT setval('bitstreamformatregistry_seq', max(bitstream_format_id)) FROM bitstreamformatregistry; +SELECT setval('checksum_history_check_id_seq', max(check_id)) FROM checksum_history; +SELECT setval('cwf_claimtask_seq', max(claimtask_id)) FROM cwf_claimtask; +SELECT setval('cwf_collectionrole_seq', max(collectionrole_id)) FROM cwf_collectionrole; +SELECT setval('cwf_in_progress_user_seq', max(in_progress_user_id)) FROM cwf_in_progress_user; +SELECT setval('cwf_pooltask_seq', max(pooltask_id)) FROM cwf_pooltask; +SELECT setval('cwf_workflowitem_seq', max(workflowitem_id)) FROM cwf_workflowitem; +SELECT setval('cwf_workflowitemrole_seq', max(workflowitemrole_id)) FROM cwf_workflowitemrole; +SELECT setval('doi_seq', max(doi_id)) FROM doi; +SELECT setval('entity_type_id_seq', max(id)) FROM entity_type; SELECT setval('fileextension_seq', max(file_extension_id)) FROM fileextension; -SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy; -SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem; -SELECT setval('registrationdata_seq', max(registrationdata_id)) FROM registrationdata; -SELECT setval('subscription_seq', max(subscription_id)) FROM subscription; -SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry; -SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue; -SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM metadataschemaregistry; +SELECT setval('handle_id_seq', max(handle_id)) FROM handle; SELECT setval('harvested_collection_seq', max(id)) FROM harvested_collection; SELECT setval('harvested_item_seq', max(id)) FROM harvested_item; -SELECT setval('webapp_seq', max(webapp_id)) FROM webapp; +SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry; +SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM metadataschemaregistry; +SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue; +SELECT setval('openurltracker_seq', max(tracker_id)) FROM openurltracker; +SELECT setval('orcid_history_id_seq', max(id)) FROM orcid_history; +SELECT setval('orcid_queue_id_seq', max(id)) FROM orcid_queue; +SELECT setval('orcid_token_id_seq', max(id)) FROM orcid_token; +SELECT setval('process_id_seq', max(process_id)) FROM process; +SELECT setval('registrationdata_seq', max(registrationdata_id)) FROM registrationdata; +SELECT setval('relationship_id_seq', max(id)) FROM relationship; +SELECT setval('relationship_type_id_seq', max(id)) FROM relationship_type; SELECT setval('requestitem_seq', max(requestitem_id)) FROM requestitem; -SELECT setval('handle_id_seq', max(handle_id)) FROM handle; +SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy; +SELECT setval('subscription_parameter_seq', max(subscription_id)) FROM subscription_parameter; +SELECT setval('subscription_seq', max(subscription_id)) FROM subscription; +SELECT setval('supervision_orders_seq', max(id)) FROM supervision_orders; +SELECT setval('versionhistory_seq', max(versionhistory_id)) FROM versionhistory; +SELECT setval('versionitem_seq', max(versionitem_id)) FROM versionitem; +SELECT setval('webapp_seq', max(webapp_id)) FROM webapp; +SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem; -- Handle Sequence is a special case. 
Since Handles minted by DSpace use the 'handle_seq', -- we need to ensure the next assigned handle will *always* be unique. So, 'handle_seq' diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql deleted file mode 100644 index 9bca3a17c99e..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id 
= 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - 
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND resource_id = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND 
mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 
6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, 
resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql deleted file mode 100644 index 917078594cfa..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql +++ /dev/null @@ -1,37 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- --- Alter workflow item -ALTER TABLE workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE workflowitem SET item_id = (SELECT item.uuid FROM item WHERE workflowitem.item_legacy_id = item.item_id); -ALTER TABLE workflowitem DROP COLUMN item_legacy_id; - --- Migrate task list item -ALTER TABLE TasklistItem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE TasklistItem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -UPDATE TasklistItem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE TasklistItem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE TasklistItem DROP COLUMN eperson_legacy_id; - --- Migrate task workflow item -ALTER TABLE workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workflowitem DROP COLUMN collection_legacy_id; -ALTER TABLE workflowitem RENAME COLUMN owner to owner_legacy_id; -ALTER TABLE workflowitem ADD owner RAW(16) REFERENCES EPerson (uuid); -UPDATE workflowitem SET owner = (SELECT eperson.uuid FROM eperson WHERE workflowitem.owner_legacy_id = eperson.eperson_id); -ALTER TABLE workflowitem DROP COLUMN owner_legacy_id; -UPDATE workflowitem SET state = -1 WHERE state IS NULL; -UPDATE workflowitem SET multiple_titles = '0' WHERE multiple_titles 
IS NULL; -UPDATE workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql deleted file mode 100644 index b3887a5af4d1..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 
AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, 
eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND dspace_object = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 
'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR 
wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO 
resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql deleted file mode 100644 index 7a992836eea6..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql +++ /dev/null @@ -1,141 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -UPDATE collection SET workflow_step_1 = null; -UPDATE collection SET workflow_step_2 = null; -UPDATE collection SET workflow_step_3 = null; - --- cwf_workflowitem - -DROP INDEX cwf_workflowitem_coll_fk_idx; - -ALTER TABLE cwf_workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE cwf_workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE cwf_workflowitem SET item_id = (SELECT item.uuid FROM item WHERE cwf_workflowitem.item_legacy_id = item.item_id); -ALTER TABLE cwf_workflowitem DROP COLUMN item_legacy_id; - -ALTER TABLE cwf_workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE cwf_workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_workflowitem DROP COLUMN collection_legacy_id; - -UPDATE cwf_workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE cwf_workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE cwf_workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - --- cwf_collectionrole - -ALTER TABLE cwf_collectionrole DROP CONSTRAINT cwf_collectionrole_unique; -DROP INDEX cwf_cr_coll_role_fk_idx; -DROP INDEX cwf_cr_coll_fk_idx; - -ALTER TABLE cwf_collectionrole RENAME COLUMN collection_id to collection_legacy_id; 
-ALTER TABLE cwf_collectionrole ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_collectionrole SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_collectionrole.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_collectionrole DROP COLUMN collection_legacy_id; - -ALTER TABLE cwf_collectionrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_collectionrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_collectionrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_collectionrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_collectionrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - --- cwf_workflowitemrole - -ALTER TABLE cwf_workflowitemrole DROP CONSTRAINT cwf_workflowitemrole_unique; -DROP INDEX cwf_wfir_item_role_fk_idx; -DROP INDEX cwf_wfir_item_fk_idx; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_workflowitemrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_workflowitemrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_workflowitemrole SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_workflowitemrole.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN eperson_legacy_id; - - -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - --- cwf_pooltask - -DROP INDEX cwf_pt_eperson_fk_idx; -DROP INDEX cwf_pt_workflow_eperson_fk_idx; - -ALTER TABLE cwf_pooltask RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_pooltask ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_pooltask SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_pooltask.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_pooltask DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_pooltask RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_pooltask ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_pooltask SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_pooltask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_pooltask DROP COLUMN eperson_legacy_id; - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - --- cwf_claimtask - -ALTER TABLE cwf_claimtask DROP CONSTRAINT cwf_claimtask_unique; -DROP INDEX cwf_ct_workflow_fk_idx; -DROP INDEX cwf_ct_workflow_eperson_fk_idx; -DROP INDEX cwf_ct_eperson_fk_idx; -DROP INDEX cwf_ct_wfs_fk_idx; -DROP INDEX cwf_ct_wfs_action_fk_idx; -DROP INDEX cwf_ct_wfs_action_e_fk_idx; - -ALTER TABLE cwf_claimtask RENAME COLUMN owner_id to 
eperson_legacy_id; -ALTER TABLE cwf_claimtask ADD owner_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_claimtask SET owner_id = (SELECT eperson.uuid FROM eperson WHERE cwf_claimtask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_claimtask DROP COLUMN eperson_legacy_id; - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - --- cwf_in_progress_user - -ALTER TABLE cwf_in_progress_user DROP CONSTRAINT cwf_in_progress_user_unique; -DROP INDEX cwf_ipu_workflow_fk_idx; -DROP INDEX cwf_ipu_eperson_fk_idx; - -ALTER TABLE cwf_in_progress_user RENAME COLUMN user_id to eperson_legacy_id; -ALTER TABLE cwf_in_progress_user ADD user_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_in_progress_user SET user_id = (SELECT eperson.uuid FROM eperson WHERE cwf_in_progress_user.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_in_progress_user DROP COLUMN eperson_legacy_id; -UPDATE cwf_in_progress_user SET finished = '0' WHERE finished IS NULL; - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql deleted file mode 100644 index 0402fc994887..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql +++ /dev/null @@ -1,27 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- UPDATE policies for claimtasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id JOIN item ON cwf_workflowitem.item_id = item.uuid) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT item2bundle.bundle_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in 
(SELECT bundle2bitstream.bitstream_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Create policies for pooled tasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql deleted file mode 100644 index f582f37c6931..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? 
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, 
step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE resource_type_id = 2 AND resource_id IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 1 AND resource_id IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 0 AND resource_id IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! --- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, 
resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion
-DECLARE
- curr NUMBER := 0;
-BEGIN
- SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem;
-
- curr := curr + 1;
-
- EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq';
-
- EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1);
-END;
-/
-
-DECLARE
- curr NUMBER := 0;
-BEGIN
- SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole;
-
- curr := curr + 1;
-
- EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq';
-
- EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1);
-END;
-/
-
-DECLARE
- curr NUMBER := 0;
-BEGIN
- SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole;
-
- curr := curr + 1;
-
- EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq';
-
- EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1);
-END;
-/
-
-DECLARE
- curr NUMBER := 0;
-BEGIN
- SELECT max(pooltask_id) INTO curr FROM cwf_pooltask;
-
- curr := curr + 1;
-
- EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq';
-
- EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1);
-END;
-/
-
-DECLARE
- curr NUMBER := 0;
-BEGIN
- SELECT max(claimtask_id) INTO curr FROM cwf_claimtask;
-
- curr := curr + 1;
-
- EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq';
-
- EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1);
-END;
-/
-
-DECLARE
- curr NUMBER := 0;
-BEGIN
- SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user;
-
- curr := curr + 1;
-
- EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq';
-
- EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1);
-END;
-/
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql
deleted file mode 100644
index 70eb419d8fbb..000000000000
--- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql
+++ /dev/null
@@ -1,377 +0,0 @@
---
--- The contents of this file are subject to the license and copyright
--- detailed in the LICENSE and NOTICE files at the root of the source
--- tree and available online at
---
--- http://www.dspace.org/license/
---
-
-----------------------------------------------------
--- Data Migration for XML/Configurable Workflow
---
--- This file will automatically migrate existing
--- classic workflows to XML/Configurable workflows.
--- NOTE however that the corresponding
--- "xml_workflow_migration.sql" script must FIRST be
--- called to create the appropriate database tables.
---
--- This script is called automatically by the following
--- Flyway Java migration class:
--- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration
-----------------------------------------------------
-
--- Convert workflow groups:
--- TODO: is 'to_number' ok?
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) 
-SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! --- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS 
policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql deleted file mode 100644 index 541af73dfe01..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow (for DSpace 6.0) --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. That step is performed by the corresponding --- "data_workflow_migration.sql" script. 
--- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id RAW(16) REFERENCES item(uuid) UNIQUE, - collection_id RAW(16) REFERENCES collection(uuid), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id RAW(16) REFERENCES collection(uuid), -group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id RAW(16) REFERENCES eperson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id RAW(16) REFERENCES EPerson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id RAW(16) REFERENCES eperson(uuid) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE 
cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id RAW(16) REFERENCES eperson(uuid), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql deleted file mode 100644 index f8f0e564e824..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. That step is performed by the corresponding --- "data_workflow_migration.sql" script. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES item(item_id) UNIQUE, - collection_id INTEGER REFERENCES collection(collection_id), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id integer REFERENCES collection(collection_id), -group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id integer REFERENCES eperson(eperson_id), - group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE 
INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id INTEGER REFERENCES EPerson(eperson_id), - group_id INTEGER REFERENCES epersongroup(eperson_group_id) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id integer REFERENCES eperson(eperson_id) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id integer REFERENCES eperson(eperson_id), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml index e5b943c5c20f..3572a42bf41c 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml @@ -65,6 +65,7 @@ + xml diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml deleted file mode 100644 index b9c11f8164d6..000000000000 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - diff --git a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml index 14b66cca7c9c..77859506546c 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml @@ -16,15 +16,6 @@ http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"> - - - - @@ -34,12 +25,6 @@ - - - - diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index 0b7def31ca3a..6ad8e441bb20 100644 --- 
a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ -100,7 +100,7 @@ utils submission - + org.dspace.app.rest.submit.step.DescribeStep submission-form @@ -115,7 +115,7 @@ org.dspace.app.rest.submit.step.AccessConditionStep accessCondition - + submit.progressbar.accessCondition org.dspace.app.rest.submit.step.AccessConditionStep @@ -146,7 +146,7 @@ detect-duplicate - + submit.progressbar.ExtractMetadataStep org.dspace.app.rest.submit.step.ExtractMetadataStep extract @@ -214,7 +214,7 @@ org.dspace.app.rest.submit.step.DescribeStep submission-form - + submit.progressbar.sherpapolicy org.dspace.app.rest.submit.step.SherpaPolicyStep @@ -231,8 +231,8 @@ org.dspace.app.rest.submit.step.DescribeStep submission-form - - + + submit.progressbar.CustomUrlStep org.dspace.app.rest.submit.step.CustomUrlStep custom-url @@ -329,7 +329,7 @@ - + @@ -337,7 +337,7 @@ - + @@ -414,7 +414,7 @@ - + @@ -436,11 +436,11 @@ - + - + diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index fb8b2863506e..df96d9775c3c 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -43,7 +43,7 @@ dspace.server.url = http://localhost db.driver = org.h2.Driver db.dialect=org.hibernate.dialect.H2Dialect # Use a 10 second database lock timeout to avoid occasional JDBC lock timeout errors -db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000;NON_KEYWORDS=ROW\,VALUE +db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000;NON_KEYWORDS=ROW\,VALUE;TIME ZONE=UTC db.username = sa db.password = # H2's default schema is PUBLIC diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml index 3aad934931dc..6962418da96a 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml @@ -100,4 +100,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml index 12a3b4e9565d..3056b535032e 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml @@ -19,7 +19,18 @@ + scope="singleton"> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.rights + + + + + + + + + + + + + + + dc.rights + + + + + + + + dc.description.provenance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR 
search.resourcetype:Collection OR search.resourcetype:Community + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true + + withdrawn:true OR discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:WorkspaceItem OR search.resourcetype:XmlWorkflowItem + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:WorkspaceItem AND supervised:true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:PoolTask OR search.resourcetype:ClaimedTask + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:XmlWorkflowItem + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:WorkspaceItem OR search.resourcetype:XmlWorkflowItem + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Person + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Person + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Project + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Project + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
[Diff hunk with its XML markup lost in extraction: apparently a test Discovery (search) configuration. The recoverable text defines per-entity discovery configurations (OrgUnit, JournalIssue, JournalVolume, Journal, Person, FundingOrganization, researcher profile) with default filter queries on search.resourcetype:Item, latestVersion:true and entityType_keyword plus "-withdrawn:true AND -discoverable:false", and lists search/sort metadata fields such as dc.title, dc.contributor.author, dc.creator, dc.subject.*, dc.date.issued, dc.type, dc.identifier, dspace.entity.type, person.*, organization.*, creativework.*, publicationissue.issueNumber, publicationVolume.volumeNumber and the relation.*.latestForDiscovery fields.]
diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml index b40cfa1704a5..1f668b9e1616 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml @@ -63,4 +63,9 @@ [five added lines, presumably the bean definition registering the new bulk-access-control script; XML markup lost in extraction]
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java new file mode 100644 index 000000000000..da0f90ca97c7 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java @@ -0,0 +1,502 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static com.jayway.jsonpath.JsonPath.read; +import static org.dspace.app.matcher.ResourcePolicyMatcher.matches; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.Assert.assertTrue; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter; +import org.dspace.app.rest.model.ParameterValueRest; +import org.dspace.app.rest.model.ProcessRest; +import org.dspace.app.rest.model.ScriptRest; +import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.test.AbstractEntityIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.ProcessBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.Process; +import org.dspace.scripts.service.ProcessService; +import org.junit.After; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.mock.web.MockMultipartFile; + +/** + * Basic integration testing for the bulk access Import feature via UI {@link BulkAccessControl}. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.com) + */ +public class BulkAccessControlScriptIT extends AbstractEntityIntegrationTest { + + @Autowired + private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; + + @Autowired + private GroupService groupService; + + @Autowired + private ProcessService processService; + + private final static String SCRIPTS_ENDPOINT = "/api/" + ScriptRest.CATEGORY + "/" + ScriptRest.PLURAL_NAME; + private final static String CURATE_SCRIPT_ENDPOINT = SCRIPTS_ENDPOINT + "/bulk-access-control/" + + ProcessRest.PLURAL_NAME; + + @After + @Override + public void destroy() throws Exception { + List processes = processService.findAll(context); + for (Process process : processes) { + ProcessBuilder.deleteProcess(process.getID()); + } + + super.destroy(); + } + + @Test + public void bulkAccessScriptWithAdminUserTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .withSubject("ExtraEntry") + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", item.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithAdminUserOfTargetCommunityTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .withAdminGroup(eperson) + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + 
parameters.add(new DSpaceCommandLineParameter("-u", parentCommunity.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithAdminUserOfTargetCollectionTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", collection.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithAdminUserOfTargetItemTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .withSubject("ExtraEntry") + .withAdminUser(eperson) + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new 
ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", item.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithMultipleTargetUuidsWithAdminUserTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection") + .build(); + + Item itemOne = ItemBuilder.createItem(context, collection) + .withTitle("Public item one") + .build(); + + Item itemTwo = ItemBuilder.createItem(context, collection) + .withTitle("Public item two") + .build(); + + Item itemThree = ItemBuilder.createItem(context, collection) + .withTitle("Public item three") + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + AtomicReference idRef = new AtomicReference<>(); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", itemOne.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-u", itemTwo.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-u", itemThree.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + try { + String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isAccepted()) + .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.processId"))); + + itemOne = context.reloadEntity(itemOne); + itemTwo = context.reloadEntity(itemTwo); + itemThree = context.reloadEntity(itemThree); + + Group anonymousGroup = 
groupService.findByName(context, Group.ANONYMOUS); + + assertThat(itemOne.getResourcePolicies(), hasSize(1)); + assertThat(itemTwo.getResourcePolicies(), hasSize(1)); + assertThat(itemThree.getResourcePolicies(), hasSize(1)); + + assertThat(itemOne.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + )); + + assertThat(itemTwo.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + )); + + assertThat(itemThree.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "openaccess", TYPE_CUSTOM) + )); + + } finally { + ProcessBuilder.deleteProcess(idRef.get()); + } + } + + @Test + public void bulkAccessScriptWithoutTargetUUIDParameterTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .withAdminGroup(eperson) + .build(); + + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .param("properties", new ObjectMapper().writeValueAsString(List.of())) + ) + .andExpect(status().isInternalServerError()) + .andExpect(result -> assertTrue(result.getResolvedException() + .getMessage() + .contains("At least one target uuid must be provided"))); + } + + @Test + public void bulkAccessScriptWithNormalUserTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); + + MockMultipartFile bitstreamFile = + new MockMultipartFile("file", "test.json", MediaType.TEXT_PLAIN_VALUE, inputStream); + + LinkedList parameters = new LinkedList<>(); + + parameters.add(new DSpaceCommandLineParameter("-u", parentCommunity.getID().toString())); + parameters.add(new DSpaceCommandLineParameter("-f", "test.json")); + + + List list = + parameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token) + .perform( + multipart(CURATE_SCRIPT_ENDPOINT) + .file(bitstreamFile) + .param("properties", new ObjectMapper().writeValueAsString(list))) + .andExpect(status().isForbidden()); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java index d839ab81fba9..f3bbae17be17 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java @@ -14,6 +14,7 @@ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; @@ -82,6 +83,7 @@ public class ItemImportIT extends AbstractEntityIntegrationTest { private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; private Collection collection; private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; @Before @Override @@ -126,6 +128,10 @@ public void importItemByZipSafWithBitstreams() throws Exception { checkMetadata(); checkMetadataWithAnotherSchema(); checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java index ac03e946e320..1ddea619d2fc 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/opensearch/OpenSearchControllerIT.java @@ -249,4 +249,24 @@ public void serviceDocumentTest() throws Exception { */ } + + @Test + public void emptyDescriptionTest() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection collection1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1") + .build(); + + getClient().perform(get("/opensearch/search") + .param("format", "rss") + .param("scope", collection1.getID().toString()) + .param("query", "*")) + .andExpect(status().isOk()) + .andExpect(xpath("rss/channel/description").string("No Description")); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java index 1a6cc29ca75c..fd128269308d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamFormatRestRepositoryIT.java @@ -56,7 +56,7 @@ public class BitstreamFormatRestRepositoryIT extends AbstractControllerIntegrati @Autowired private BitstreamFormatConverter bitstreamFormatConverter; - private final int DEFAULT_AMOUNT_FORMATS = 81; + private final int DEFAULT_AMOUNT_FORMATS = 85; @Test public void findAllPaginationTest() throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index bc62143ccdf7..72d783ef2299 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -17,12 +17,14 @@ import static org.dspace.builder.ResourcePolicyBuilder.createResourcePolicy; import static org.dspace.content.BitstreamFormat.KNOWN; import static org.dspace.content.BitstreamFormat.SUPPORTED; +import static org.dspace.core.Constants.DEFAULT_BITSTREAM_READ; import static org.dspace.core.Constants.READ; import static org.dspace.core.Constants.WRITE; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static 
org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; @@ -56,6 +58,7 @@ import org.apache.pdfbox.text.PDFTextStripper; import org.apache.solr.client.solrj.SolrServerException; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.CollectionBuilder; @@ -70,6 +73,7 @@ import org.dspace.content.Item; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; import org.dspace.core.Constants; import org.dspace.disseminate.CitationDocumentServiceImpl; import org.dspace.eperson.EPerson; @@ -112,6 +116,12 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest @Autowired private BitstreamFormatService bitstreamFormatService; + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private CollectionService collectionService; + private Bitstream bitstream; private BitstreamFormat supportedFormat; private BitstreamFormat knownFormat; @@ -626,6 +636,54 @@ public void testPrivateBitstream() throws Exception { } + @Test + public void testBitstreamDefaultReadInheritanceFromCollection() throws Exception { + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community and one collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + // Explicitly create a restrictive default bitstream read policy on the collection + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + authorizeService.removePoliciesActionFilter(context, col1, DEFAULT_BITSTREAM_READ); + authorizeService.addPolicy(context, col1, DEFAULT_BITSTREAM_READ, internalGroup); + + //2. 
A public item with a new bitstream that is not explicitly restricted + // but should instead inherit + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .build(); + // make sure this item has no default policies for a new bundle to inherit + authorizeService.removePoliciesActionFilter(context, publicItem1, DEFAULT_BITSTREAM_READ); + + String bitstreamContent = "Private!"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Test Restricted Bitstream") + .withDescription("This bitstream is restricted") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + //** WHEN ** + //We download the bitstream + getClient().perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + //** THEN ** + .andExpect(status().isUnauthorized()); + + //An unauthorized request should not log statistics + checkNumberOfStatsRecords(bitstream, 0); + } + @Test public void restrictedGroupBitstreamForbiddenTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -1265,4 +1323,57 @@ public void testEmbargoedBitstreamWithCrisSecurity() throws Exception { checkNumberOfStatsRecords(bitstream, 2); } + + @Test + public void checkContentDispositionOfFormats() throws Exception { + configurationService.setProperty("webui.content_disposition_format", new String[] { + "text/richtext", + "text/xml", + "txt" + }); + + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).build(); + String content = "Test Content"; + Bitstream rtf; + Bitstream xml; + Bitstream txt; + Bitstream html; + try (InputStream is = IOUtils.toInputStream(content, CharEncoding.UTF_8)) { + rtf = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/richtext").build(); + xml = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/xml").build(); + txt = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/plain").build(); + html = BitstreamBuilder.createBitstream(context, item, is) + .withMimeType("text/html").build(); + } + context.restoreAuthSystemState(); + + // these formats are configured and files should be downloaded + verifyBitstreamDownload(rtf, "text/richtext;charset=UTF-8", true); + verifyBitstreamDownload(xml, "text/xml;charset=UTF-8", true); + verifyBitstreamDownload(txt, "text/plain;charset=UTF-8", true); + // this format is not configured and should open inline + verifyBitstreamDownload(html, "text/html;charset=UTF-8", false); + } + + private void verifyBitstreamDownload(Bitstream file, String contentType, boolean shouldDownload) throws Exception { + String token = getAuthToken(admin.getEmail(), password); + String header = getClient(token).perform(get("/api/core/bitstreams/" + file.getID() + "/content") + .header("Accept", contentType)) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andReturn().getResponse().getHeader("content-disposition"); + if (shouldDownload) { + assertTrue(header.contains("attachment")); + assertFalse(header.contains("inline")); + } else { + assertTrue(header.contains("inline")); + assertFalse(header.contains("attachment")); + } + } } 
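The checkContentDispositionOfFormats test above exercises the behaviour an administrator enables through the webui.content_disposition_format property: formats listed there are served with Content-Disposition: attachment (forced download), while everything else opens inline. A minimal sketch of the equivalent local.cfg entry, assuming the usual comma-separated list syntax for DSpace configuration values:

    # Force download (Content-Disposition: attachment) for these MIME types / extensions
    webui.content_disposition_format = text/richtext, text/xml, txt
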
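The BitstreamRestRepositoryIT additions that follow exercise bulk bitstream deletion by sending a JSON Patch document to PATCH /api/core/bitstreams with content type application/json-patch+json. As a minimal sketch of the request body that getPatchContent(ops) produces, assuming OPERATION_PATH_BITSTREAM_REMOVE resolves to a path prefix such as "/bitstreams/" (its value is not shown in this patch) and using illustrative UUIDs:

    [
      { "op": "remove", "path": "/bitstreams/11111111-1111-1111-1111-111111111111" },
      { "op": "remove", "path": "/bitstreams/22222222-2222-2222-2222-222222222222" },
      { "op": "remove", "path": "/bitstreams/33333333-3333-3333-3333-333333333333" }
    ]

Each remove operation identifies one bitstream by UUID. As the tests verify, the request either succeeds for all listed bitstreams (204 No Content) or is rejected as a whole, for example with 422 Unprocessable Entity when a UUID cannot be resolved, in which case no bitstream is deleted.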
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index b850d973e4f3..ac89b90dfbbe 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -7,10 +7,13 @@ */ package org.dspace.app.rest; +import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; +import static javax.servlet.http.HttpServletResponse.SC_OK; import static org.apache.commons.codec.CharEncoding.UTF_8; import static org.apache.commons.io.IOUtils.toInputStream; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; +import static org.dspace.app.rest.repository.patch.operation.BitstreamRemoveOperation.OPERATION_PATH_BITSTREAM_REMOVE; import static org.dspace.core.Constants.WRITE; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.Matchers.contains; @@ -19,6 +22,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; @@ -27,9 +31,11 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.InputStream; +import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.UUID; +import javax.ws.rs.core.MediaType; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; @@ -39,6 +45,7 @@ import org.dspace.app.rest.matcher.HalMatcher; import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.RemoveOperation; import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; @@ -61,16 +68,21 @@ import org.dspace.content.WorkspaceItem; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.util.UUIDUtils; import org.hamcrest.Matchers; +import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.web.servlet.MvcResult; public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest { @@ -89,6 +101,12 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest @Autowired private ItemService itemService; + @Autowired + CollectionService collectionService; + + @Autowired + CommunityService communityService; + @Test public void findAllTest() throws Exception { //We turn off the authorization 
system in order to create the structure as defined below @@ -346,7 +364,7 @@ public void findOneBitstreamTest_EmbargoedBitstream_Anon() throws Exception { Item publicItem1; Bitstream bitstream; - try (InputStream is = IOUtils.toInputStream(bitstreamContent, org.apache.commons.lang3.CharEncoding.UTF_8)) { + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { publicItem1 = ItemBuilder.createItem(context, col1) .withTitle("Public item 1") @@ -1320,6 +1338,7 @@ public void patchReplaceMultipleDescriptionBitstream() throws Exception { bitstream = BitstreamBuilder. createBitstream(context, publicItem1, is) .withName("Bitstream") + .withMimeType("text/plain") .build(); } @@ -2925,4 +2944,514 @@ public void findShowableByItem() throws Exception { ); } + @Test + public void deleteBitstreamsInBulk() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Verify that only the three bitstreams were deleted and the fourth one still exists + Assert.assertTrue(bitstreamNotFound(token, bitstream1, bitstream2, bitstream3)); + Assert.assertTrue(bitstreamExists(token, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidUUID() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + 
Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // For the third bitstream, use an invalid UUID + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + UUID randomUUID = UUID.randomUUID(); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + randomUUID); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + MvcResult result = getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnprocessableEntity()) + .andReturn(); + + // Verify our custom error message is returned when an invalid UUID is used + assertEquals("Bitstream with uuid " + randomUUID + " could not be found in the repository", + result.getResponse().getErrorMessage()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + 
.withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // But set the rest.patch.operations.limit property to 2, so that the request is invalid + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("rest.patch.operations.limit", 2); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isBadRequest()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_Unauthorized() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + 
ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient().perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnauthorized()); + } + + @Test + public void deleteBitstreamsInBulk_Forbidden() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + } + + @Test + public void deleteBitstreamsInBulk_collectionAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson col1Admin = EPersonBuilder.createEPerson(context) + .withEmail("col1admin@test.com") + .withPassword(password) + .build(); + EPerson col2Admin = EPersonBuilder.createEPerson(context) + .withEmail("col2admin@test.com") + .withPassword(password) + .build(); + Group col1_AdminGroup = collectionService.createAdministrators(context, col1); + Group col2_AdminGroup = collectionService.createAdministrators(context, col2); + groupService.addMember(context, col1_AdminGroup, col1Admin); + 
groupService.addMember(context, col2_AdminGroup, col2Admin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(col1Admin.getEmail(), password); + // Should return forbidden since one of the bitstreams does not originate form collection 1 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(2); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Change the token to the admin of collection 2 + token = getAuthToken(col2Admin.getEmail(), password); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + ops = new ArrayList<>(); + removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp1); + removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp2); + removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream4.getID()); + ops.add(removeOp3); + patchBody = getPatchContent(ops); + + // Should return forbidden since one of the bitstreams does not originate form collection 2 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(0); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + @Test 
+ public void deleteBitstreamsInBulk_communityAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson parentCommunityAdmin = EPersonBuilder.createEPerson(context) + .withEmail("parentComAdmin@test.com") + .withPassword(password) + .build(); + Group parentComAdminGroup = communityService.createAdministrators(context, parentCommunity); + groupService.addMember(context, parentComAdminGroup, parentCommunityAdmin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(parentCommunityAdmin.getEmail(), password); + // Bitstreams originate from two different collections, but those collections live in the same community, so + // a community admin should be able to delete them + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_OK) { + return false; + } + } + return true; + } + + public boolean bitstreamNotFound(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_NOT_FOUND) { + return false; + } + } + return true; + } + + } diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index 427235e1367a..35dc090df479 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -23,6 +24,7 @@ import org.dspace.app.rest.matcher.BrowseEntryResourceMatcher; import org.dspace.app.rest.matcher.BrowseIndexMatcher; import org.dspace.app.rest.matcher.ItemMatcher; +import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; @@ -63,9 +65,10 @@ public void findAll() throws Exception { //We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) - //Our default Discovery config has 4 browse indexes so we expect this to be reflected in the page + //Our default Discovery config has 5 browse indexes, so we expect this to be reflected in the page // object .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(5))) .andExpect(jsonPath("$.page.totalElements", is(11))) .andExpect(jsonPath("$.page.totalPages", is(1))) .andExpect(jsonPath("$.page.number", is(0))) @@ -132,6 +135,21 @@ public void findBrowseByContributor() throws Exception { ; } + @Test + public void findBrowseByVocabulary() throws Exception { + //Use srsc as this vocabulary is included by default + //When we call the root endpoint + getClient().perform(get("/api/discover/browses/srsc")) + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + + //Check that the JSON root matches the expected browse index + .andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc"))) + ; + } + @Test public void findBrowseBySubject() throws Exception { //When we call the root endpoint @@ -490,6 +508,251 @@ public void findBrowseBySubjectItems() throws Exception { .andExpect(jsonPath("$.page.size", is(20))); } + @Test + public void findBrowseBySubjectItemsWithScope() throws Exception { + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. 
Two public items with the same subject and another public item that contains that same subject, but also + // another one + // All of the items are readable by an Anonymous user + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("zPublic item more") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry").withSubject("AnotherTest") + .build(); + + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Public item 2") + .withIssueDate("2016-02-13") + .withAuthor("Smith, Maria").withAuthor("Doe, Jane") + .withSubject("AnotherTest") + .build(); + + Item publicItem3 = ItemBuilder.createItem(context, col2) + .withTitle("Public item 3") + .withIssueDate("2016-02-14") + .withAuthor("Smith, Maria").withAuthor("Doe, Jane") + .withSubject("AnotherTest") + .build(); + + Item withdrawnItem1 = ItemBuilder.createItem(context, col2) + .withTitle("Withdrawn item 1") + .withIssueDate("2016-02-13") + .withAuthor("Smith, Maria").withAuthor("Doe, Jane") + .withSubject("AnotherTest").withSubject("TestingForMore") + .withSubject("ExtraEntry").withSubject("WithdrawnEntry") + .withdrawn() + .build(); + Item privateItem1 = ItemBuilder.createItem(context, col2) + .withTitle("Private item 1") + .withIssueDate("2016-02-13") + .withAuthor("Smith, Maria").withAuthor("Doe, Jane") + .withSubject("AnotherTest").withSubject("TestingForMore") + .withSubject("ExtraEntry").withSubject("PrivateEntry") + .makeUnDiscoverable() + .build(); + + context.restoreAuthSystemState(); + + //** WHEN ** + //An anonymous user browses the items that correspond with the ExtraEntry subject query + getClient().perform(get("/api/discover/browses/subject/items") + .param("scope", String.valueOf(col2.getID())) + .param("filterValue", "ExtraEntry")) + //** THEN ** + //The status has to be 200 + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + //We expect there to be no elements in collection 2 + .andExpect(jsonPath("$.page.totalElements", is(0))) + .andExpect(jsonPath("$.page.size", is(20))); + + //** WHEN ** + //An anonymous user browses the items that correspond with the AnotherTest subject query + getClient().perform(get("/api/discover/browses/subject/items") + .param("scope", String.valueOf(col2.getID())) + .param("filterValue", "AnotherTest")) + //** THEN ** + //The status has to be 200 + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + //We expect there to be only two elements, the ones that we've added with the requested subject + // in collection 2 + .andExpect(jsonPath("$.page.totalElements", is(2))) + .andExpect(jsonPath("$.page.size", is(20))) + //Verify that the title of the public and embargoed items are present and sorted descending + .andExpect(jsonPath("$._embedded.items", contains( + ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2, "Public item 2", "2016-02-13"), + ItemMatcher.matchItemWithTitleAndDateIssued(publicItem3, "Public item 3", "2016-02-14") + ))); + + //** WHEN ** + //An anonymous user browses the items that correspond with the PrivateEntry subject query + getClient().perform(get("/api/discover/browses/subject/items") + .param("scope", String.valueOf(col2.getID())) + .param("filterValue", "PrivateEntry")) + //** THEN ** + //The status has to be 200 + .andExpect(status().isOk()) + //We expect the content type to be 
"application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + //We expect there to be no elements because the item is private + .andExpect(jsonPath("$.page.totalElements", is(0))) + .andExpect(jsonPath("$.page.size", is(20))); + + //** WHEN ** + //An anonymous user browses the items that correspond with the WithdrawnEntry subject query + getClient().perform(get("/api/discover/browses/subject/items") + .param("scope", String.valueOf(col2.getID())) + .param("filterValue", "WithdrawnEntry")) + //** THEN ** + //The status has to be 200 + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + //We expect there to be no elements because the item is withdrawn + .andExpect(jsonPath("$.page.totalElements", is(0))) + .andExpect(jsonPath("$.page.size", is(20))); + } + + @Test + public void findBrowseBySubjectItemsWithScopeAsAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Two public items with the same subject and another public item that contains that same subject, but also + // another one + // All of the items are readable by an Anonymous user + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("zPublic item more") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry").withSubject("AnotherTest") + .build(); + + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Public item 2") + .withIssueDate("2016-02-13") + .withAuthor("Smith, Maria").withAuthor("Doe, Jane") + .withSubject("AnotherTest") + .build(); + + Item publicItem3 = ItemBuilder.createItem(context, col2) + .withTitle("Public item 3") + .withIssueDate("2016-02-14") + .withAuthor("Smith, Maria").withAuthor("Doe, Jane") + .withSubject("AnotherTest") + .build(); + + Item withdrawnItem1 = ItemBuilder.createItem(context, col2) + .withTitle("Withdrawn item 1") + .withIssueDate("2016-02-13") + .withAuthor("Smith, Maria").withAuthor("Doe, Jane") + .withSubject("AnotherTest").withSubject("TestingForMore") + .withSubject("ExtraEntry").withSubject("WithdrawnEntry") + .withdrawn() + .build(); + Item privateItem1 = ItemBuilder.createItem(context, col2) + .withTitle("Private item 1") + .withIssueDate("2016-02-13") + .withAuthor("Smith, Maria").withAuthor("Doe, Jane") + .withSubject("AnotherTest").withSubject("TestingForMore") + .withSubject("ExtraEntry").withSubject("PrivateEntry") + .makeUnDiscoverable() + .build(); + + context.restoreAuthSystemState(); + + String adminToken = getAuthToken(admin.getEmail(), password); + + + //** WHEN ** + //An admin user browses the items that correspond with the ExtraEntry subject query + getClient(adminToken).perform(get("/api/discover/browses/subject/items") + .param("scope", String.valueOf(col2.getID())) + .param("filterValue", "ExtraEntry")) + //** THEN ** + //The status has to be 200 + .andExpect(status().isOk()) 
+ //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + //We expect there to be no elements in collection 2 + .andExpect(jsonPath("$.page.totalElements", is(0))) + .andExpect(jsonPath("$.page.size", is(20))); + + //** WHEN ** + //An admin user browses the items that correspond with the AnotherTest subject query + getClient(adminToken).perform(get("/api/discover/browses/subject/items") + .param("scope", String.valueOf(col2.getID())) + .param("filterValue", "AnotherTest")) + //** THEN ** + //The status has to be 200 + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + //We expect there to be only two elements, the ones that we've added with the requested subject + // in collection 2 + .andExpect(jsonPath("$.page.totalElements", is(2))) + .andExpect(jsonPath("$.page.size", is(20))) + //Verify that the title of the public and embargoed items are present and sorted descending + .andExpect(jsonPath("$._embedded.items", contains( + ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2, "Public item 2", "2016-02-13"), + ItemMatcher.matchItemWithTitleAndDateIssued(publicItem3, "Public item 3", "2016-02-14") + ))); + + //** WHEN ** + //An admin user browses the items that correspond with the PrivateEntry subject query + getClient(adminToken).perform(get("/api/discover/browses/subject/items") + .param("scope", String.valueOf(col2.getID())) + .param("filterValue", "PrivateEntry")) + //** THEN ** + //The status has to be 200 + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + //We expect there to be no elements because the item is private + .andExpect(jsonPath("$.page.totalElements", is(0))) + .andExpect(jsonPath("$.page.size", is(20))); + + //** WHEN ** + //An admin user browses the items that correspond with the WithdrawnEntry subject query + getClient(adminToken).perform(get("/api/discover/browses/subject/items") + .param("scope", String.valueOf(col2.getID())) + .param("filterValue", "WithdrawnEntry")) + //** THEN ** + //The status has to be 200 + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + //We expect there to be no elements because the item is withdrawn + .andExpect(jsonPath("$.page.totalElements", is(0))) + .andExpect(jsonPath("$.page.size", is(20))); + } + @Test public void findBrowseByTitleItems() throws Exception { context.turnOffAuthorisationSystem(); @@ -627,6 +890,135 @@ public void findBrowseByTitleItems() throws Exception { not(matchMetadata("dc.title", "Internal publication"))))); } + @Test + public void findBrowseByTitleItemsWithScope() throws Exception { + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. 
Two public items that are readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("Java").withSubject("Unit Testing") + .build(); + + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Public item 2") + .withIssueDate("2016-02-13") + .withAuthor("Smith, Maria").withAuthor("Doe, Jane") + .withSubject("Angular").withSubject("Unit Testing") + .build(); + + //3. An item that has been made private + Item privateItem = ItemBuilder.createItem(context, col2) + .withTitle("This is a private item") + .withIssueDate("2015-03-12") + .withAuthor("Duck, Donald") + .withSubject("Cartoons").withSubject("Ducks") + .makeUnDiscoverable() + .build(); + + //4. An item with an item-level embargo + Item embargoedItem = ItemBuilder.createItem(context, col2) + .withTitle("An embargoed publication") + .withIssueDate("2017-08-10") + .withAuthor("Mouse, Mickey") + .withSubject("Cartoons").withSubject("Mice") + .withEmbargoPeriod("12 months") + .build(); + + //5. An item that is only readable for an internal group + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + + Item internalItem = ItemBuilder.createItem(context, col2) + .withTitle("Internal publication") + .withIssueDate("2016-09-19") + .withAuthor("Doe, John") + .withSubject("Unknown") + .withReaderGroup(internalGroup) + .build(); + + context.restoreAuthSystemState(); + + //** WHEN ** + //An anonymous user browses the items in the Browse by item endpoint + //sorted descending by title + getClient().perform(get("/api/discover/browses/title/items") + .param("scope", String.valueOf(col2.getID())) + .param("sort", "title,desc")) + + //** THEN ** + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(1))) + .andExpect(jsonPath("$.page.totalPages", is(1))) + .andExpect(jsonPath("$.page.number", is(0))) + + .andExpect(jsonPath("$._embedded.items", + contains(ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2, + "Public item 2", + "2016-02-13")))) + + //The private and internal items must not be present + .andExpect(jsonPath("$._embedded.items[*].metadata", Matchers.allOf( + not(matchMetadata("dc.title", "This is a private item")), + not(matchMetadata("dc.title", "Internal publication"))))); + + String adminToken = getAuthToken(admin.getEmail(), password); + //** WHEN ** + //An admin user browses the items in the Browse by item endpoint + //sorted descending by title + getClient(adminToken).perform(get("/api/discover/browses/title/items") + .param("scope", String.valueOf(col2.getID())) + .param("sort", "title,desc")) + + //** THEN ** + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))) + .andExpect(jsonPath("$.page.totalPages", is(1))) + .andExpect(jsonPath("$.page.number", is(0))) + .andExpect(jsonPath("$._embedded.items", contains( + ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2, + "Public item 2", + "2016-02-13"), + ItemMatcher.matchItemWithTitleAndDateIssued(internalItem, + "Internal publication", + "2016-09-19"), + ItemMatcher.matchItemWithTitleAndDateIssued(embargoedItem, + "An embargoed publication", + "2017-08-10") + + ))) + + + //The private item must not be present + .andExpect(jsonPath("$._embedded.items[*].metadata", Matchers.allOf( + not(matchMetadata("dc.title", "This is a private item")) + ))); + } + + @Test /** * This test was introduced to reproduce the bug DS-4269 Pagination links must be consistent also when there is not @@ -852,6 +1244,133 @@ public void testPaginationBrowseByDateIssuedItems() throws Exception { ))); } + @Test + public void testPaginationBrowseByDateIssuedItemsWithScope() throws Exception { + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. 7 public items that are readable by Anonymous + Item item1 = ItemBuilder.createItem(context, col1) + .withTitle("Item 1") + .withIssueDate("2017-10-17") + .build(); + + Item item2 = ItemBuilder.createItem(context, col2) + .withTitle("Item 2") + .withIssueDate("2016-02-13") + .build(); + + Item item3 = ItemBuilder.createItem(context, col1) + .withTitle("Item 3") + .withIssueDate("2016-02-12") + .build(); + + Item item4 = ItemBuilder.createItem(context, col2) + .withTitle("Item 4") + .withIssueDate("2016-02-11") + .build(); + + Item item5 = ItemBuilder.createItem(context, col1) + .withTitle("Item 5") + .withIssueDate("2016-02-10") + .build(); + + Item item6 = ItemBuilder.createItem(context, col2) + .withTitle("Item 6") + .withIssueDate("2016-01-13") + .build(); + + Item item7 = ItemBuilder.createItem(context, col1) + .withTitle("Item 7") + .withIssueDate("2016-01-12") + .build(); + + Item withdrawnItem1 = ItemBuilder.createItem(context, col2) + .withTitle("Withdrawn item 1") + .withIssueDate("2016-02-13") + .withdrawn() + .build(); + + Item privateItem1 = ItemBuilder.createItem(context, col2) + .withTitle("Private item 1") + .makeUnDiscoverable() + .build(); + + + context.restoreAuthSystemState(); + + //** WHEN ** + //An anonymous user browses the items in the Browse by date issued endpoint + //sorted ascending by title with a page size of 5 + getClient().perform(get("/api/discover/browses/dateissued/items") + .param("scope", String.valueOf(col2.getID())) + .param("sort", "title,asc") + .param("size", "5")) + + //** THEN ** + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + + //We expect only the first five items to be present + .andExpect(jsonPath("$.page.size", is(5))) + .andExpect(jsonPath("$.page.totalElements", is(3))) + .andExpect(jsonPath("$.page.totalPages", is(1))) + .andExpect(jsonPath("$.page.number", is(0))) + + //Verify that the title and date of the items match and that they are sorted ascending + .andExpect(jsonPath("$._embedded.items", + contains( + ItemMatcher.matchItemWithTitleAndDateIssued(item2, + "Item 2", "2016-02-13"), + ItemMatcher.matchItemWithTitleAndDateIssued(item4, + 
"Item 4", "2016-02-11"), + ItemMatcher.matchItemWithTitleAndDateIssued(item6, + "Item 6", "2016-01-13") + ))); + + String adminToken = getAuthToken(admin.getEmail(), password); + getClient(adminToken).perform(get("/api/discover/browses/dateissued/items") + .param("scope", String.valueOf(col2.getID())) + .param("sort", "title,asc") + .param("size", "5")) + + //** THEN ** + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + + //We expect only the first five items to be present + .andExpect(jsonPath("$.page.size", is(5))) + .andExpect(jsonPath("$.page.totalElements", is(3))) + .andExpect(jsonPath("$.page.totalPages", is(1))) + .andExpect(jsonPath("$.page.number", is(0))) + + //Verify that the title and date of the items match and that they are sorted ascending + .andExpect(jsonPath("$._embedded.items", + contains( + ItemMatcher.matchItemWithTitleAndDateIssued(item2, + "Item 2", "2016-02-13"), + ItemMatcher.matchItemWithTitleAndDateIssued(item4, + "Item 4", "2016-02-11"), + ItemMatcher.matchItemWithTitleAndDateIssued(item6, + "Item 6", "2016-01-13") + ))); + + } + @Test public void testBrowseByEntriesStartsWith() throws Exception { context.turnOffAuthorisationSystem(); @@ -1708,7 +2227,7 @@ public void findOneLinked() throws Exception { // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))) + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))) ; } @@ -1725,7 +2244,7 @@ public void findOneLinkedPassingTwoFields() throws Exception { // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))); + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BulkAccessConditionRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BulkAccessConditionRestRepositoryIT.java new file mode 100644 index 000000000000..ecca60c7e53f --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BulkAccessConditionRestRepositoryIT.java @@ -0,0 +1,256 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; +import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.AccessConditionOptionMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import 
org.hamcrest.Matchers; +import org.junit.Test; + +/** + * Integration test class for the bulkaccessconditionoptions endpoint. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessConditionRestRepositoryIT extends AbstractControllerIntegrationTest { + + @Test + public void findAllByAdminUserTest() throws Exception { + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken) + .perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))) + .andExpect(jsonPath("$._embedded.bulkaccessconditionoptions", containsInAnyOrder(allOf( + hasJsonPath("$.id", is("default")), + hasJsonPath("$.itemAccessConditionOptions", Matchers.containsInAnyOrder( + AccessConditionOptionMatcher.matchAccessConditionOption("openaccess", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("embargo", true , false, "+36MONTHS", null), + AccessConditionOptionMatcher.matchAccessConditionOption("administrator", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("lease", false , true, null, "+6MONTHS")) + ), + hasJsonPath("$.bitstreamAccessConditionOptions", Matchers.containsInAnyOrder( + AccessConditionOptionMatcher.matchAccessConditionOption("openaccess", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("embargo", true , false, "+36MONTHS", null), + AccessConditionOptionMatcher.matchAccessConditionOption("administrator", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("lease", false , true, null, "+6MONTHS")) + ))))); + } + + @Test + public void findAllByAdminUserOfAnCommunityTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + // create community and assign eperson to admin group + CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isOk()); + } + + @Test + public void findAllByAdminUserOfAnCollectionTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + Community community = + CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + // create collection and assign eperson to admin group + CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isOk()); + } + + @Test + public void findAllByAdminUserOfAnItemTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + Community community = + CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + // create item and assign eperson as admin user + ItemBuilder.createItem(context, collection) + .withTitle("item") + .withAdminUser(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions")) + 
.andExpect(status().isOk()); + } + + @Test + public void findAllByNormalUserTest() throws Exception { + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isForbidden()); + } + + @Test + public void findAllByAnonymousUserTest() throws Exception { + getClient().perform(get("/api/config/bulkaccessconditionoptions")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneByAdminTest() throws Exception { + String tokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin) + .perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.id", is("default"))) + .andExpect(jsonPath("$.itemAccessConditionOptions", Matchers.containsInAnyOrder( + AccessConditionOptionMatcher.matchAccessConditionOption("openaccess", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("embargo", true , false, "+36MONTHS", null), + AccessConditionOptionMatcher.matchAccessConditionOption("administrator", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("lease", false , true, null, "+6MONTHS")) + )) + .andExpect(jsonPath("$.bitstreamAccessConditionOptions", Matchers.containsInAnyOrder( + AccessConditionOptionMatcher.matchAccessConditionOption("openaccess", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("embargo", true , false, "+36MONTHS", null), + AccessConditionOptionMatcher.matchAccessConditionOption("administrator", false , false, null, null), + AccessConditionOptionMatcher.matchAccessConditionOption("lease", false , true, null, "+6MONTHS")) + )) + .andExpect(jsonPath("$.type", is("bulkaccessconditionoption"))); + } + + @Test + public void findOneByAdminOfAnCommunityTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + // create community and assign eperson to admin group + CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + + getClient(authToken) + .perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isOk()); + } + + @Test + public void findOneByAdminOfAnCollectionTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + Community community = + CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + // create collection and assign eperson to admin group + CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + + getClient(authToken) + .perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isOk()); + } + + @Test + public void findOneByAdminOfAnItemTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + Community community = + CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + // create item and assign eperson as admin user + ItemBuilder.createItem(context, collection) + .withTitle("item") + .withAdminUser(eperson) + .build(); + + context.restoreAuthSystemState(); + + String authToken = 
getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isOk()); + } + + @Test + public void findOneByNormalUserTest() throws Exception { + String tokenEPerson = getAuthToken(eperson.getEmail(), password); + getClient(tokenEPerson) + .perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isForbidden()); + } + + @Test + public void findOneByAnonymousUserTest() throws Exception { + getClient().perform(get("/api/config/bulkaccessconditionoptions/default")) + .andExpect(status().isUnauthorized()); + } + + + @Test + public void findOneNotFoundTest() throws Exception { + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(get("/api/config/bulkaccessconditionoptions/wrong")) + .andExpect(status().isNotFound()); + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java index 735d0321ec84..98d9c4644107 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionRestRepositoryIT.java @@ -539,7 +539,7 @@ public void findOneCollectionGrantAccessAdminsTest() throws Exception { getClient(tokenParentAdmin).perform(get("/api/core/collections/" + col1.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", - Matchers.is(CollectionMatcher.matchCollection(col1)))); + Matchers.is((CollectionMatcher.matchCollection(col1))))); String tokenCol1Admin = getAuthToken(col1Admin.getEmail(), "qwerty02"); getClient(tokenCol1Admin).perform(get("/api/core/collections/" + col1.getID())) @@ -3647,6 +3647,7 @@ public void patchReplaceMultipleDescriptionCollection() throws Exception { ); } + @Test public void patchMetadataCheckReindexingTest() throws Exception { context.turnOffAuthorisationSystem(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java index 1b17215054ad..a8417e84f809 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java @@ -146,13 +146,15 @@ private ArrayList getRecords() { + " Medical College of Prevention of Iodine Deficiency Diseases"); MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "L.V. 
Senyuk"); MetadatumDTO type = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.184"); MetadatumDTO issn = createMetadatumDTO("dc", "relation", "issn", "2415-3060"); MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums.add(title); metadatums.add(author); @@ -163,6 +165,7 @@ private ArrayList getRecords() { metadatums.add(issn); metadatums.add(volume); metadatums.add(issue); + metadatums.add(publisher); ImportRecord firstrRecord = new ImportRecord(metadatums); @@ -172,13 +175,15 @@ private ArrayList getRecords() { "Ischemic Heart Disease and Role of Nurse of Cardiology Department"); MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "K. І. Kozak"); MetadatumDTO type2 = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof2 = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi2 = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.105"); MetadatumDTO issn2 = createMetadatumDTO("dc", "relation", "issn", "2415-3060"); MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher2 = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums2.add(title2); metadatums2.add(author2); @@ -189,6 +194,7 @@ private ArrayList getRecords() { metadatums2.add(issn2); metadatums2.add(volume2); metadatums2.add(issue2); + metadatums2.add(publisher2); ImportRecord secondRecord = new ImportRecord(metadatums2); records.add(firstrRecord); @@ -196,4 +202,4 @@ private ArrayList getRecords() { return records; } -} \ No newline at end of file +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java new file mode 100644 index 000000000000..a3408a7736df --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -0,0 +1,677 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.FacetEntryMatcher; +import org.dspace.app.rest.matcher.FacetValueMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.MetadataFieldBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.service.CollectionService; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * This class tests the correct inheritance of Discovery configurations for sub communities and collections. + * To thoroughly test this, a community and collection structure is set up in which different communities and + * collections have custom configurations configured for them. + * + * The following structure is used: + * - Parent Community 1 - Custom configuration: discovery-parent-community-1 + * -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1 + * -- Collection 111 - Custom configuration: discovery-collection-1-1-1 + * -- Collection 112 + * -- Subcommunity 12 + * -- Collection 121 - Custom configuration: discovery-collection-1-2-1 + * -- Collection 122 + * - Parent Community 2 + * -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1 + * -- Collection 211 - Custom configuration: discovery-collection-2-1-1 + * -- Collection 212 + * -- Subcommunity 22 + * -- Collection 221 - Custom configuration: discovery-collection-2-2-1 + * -- Collection 222 + * + * Each custom configuration contains a unique index for a unique metadata field, to verify that the correct + * information is indexed and provided for the different search scopes. + * + * Each collection has an item in it. In addition to these items, there are two mapped items, one in collection 111 + * and 222, and one in collection 122 and 211. + * + * The tests will verify that for each object, the correct facets are provided and that all the necessary fields to + * power these facets are indexed properly. 
+ * + * This file requires the discovery configuration in the following test file: + * src/test/data/dspaceFolder/config/spring/api/test-discovery.xml + */ +public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { + + @Autowired + CollectionService collectionService; + + private Community parentCommunity1; + private Community subcommunity11; + private Community subcommunity12; + private Collection collection111; + private Collection collection112; + private Collection collection121; + private Collection collection122; + + private Community parentCommunity2; + private Community subcommunity21; + private Community subcommunity22; + private Collection collection211; + private Collection collection212; + private Collection collection221; + private Collection collection222; + + @Before + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build(); + + parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") + .build(); + subcommunity11 = CommunityBuilder + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1") + .build(); + subcommunity12 = CommunityBuilder + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2") + .build(); + collection111 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1") + .build(); + collection112 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2") + .build(); + collection121 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1") + .build(); + + collection122 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2") + .build(); + + parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") + .build(); + + + subcommunity21 = CommunityBuilder + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-1") + .build(); + subcommunity22 = CommunityBuilder + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2") + .build(); + collection211 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1") + .build(); + collection212 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2") + .build(); + collection221 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1") + .build(); + collection222 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2") + .build(); + + + Item item111 = ItemBuilder.createItem(context, collection111) + .withMetadata("dc", 
"contributor", "author", "author-item111") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item111") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item111") + .withMetadata("dc", "test", "collection111field", "collection111field-item111") + .withMetadata("dc", "test", "collection121field", "collection121field-item111") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item111") + .withMetadata("dc", "test", "collection211field", "collection211field-item111") + .withMetadata("dc", "test", "collection221field", "collection221field-item111") + .build(); + + Item item112 = ItemBuilder.createItem(context, collection112) + .withMetadata("dc", "contributor", "author", "author-item112") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item112") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item112") + .withMetadata("dc", "test", "collection111field", "collection111field-item112") + .withMetadata("dc", "test", "collection121field", "collection121field-item112") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item112") + .withMetadata("dc", "test", "collection211field", "collection211field-item112") + .withMetadata("dc", "test", "collection221field", "collection221field-item112") + .build(); + + Item item121 = ItemBuilder.createItem(context, collection121) + .withMetadata("dc", "contributor", "author", "author-item121") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item121") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item121") + .withMetadata("dc", "test", "collection111field", "collection111field-item121") + .withMetadata("dc", "test", "collection121field", "collection121field-item121") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item121") + .withMetadata("dc", "test", "collection211field", "collection211field-item121") + .withMetadata("dc", "test", "collection221field", "collection221field-item121") + .build(); + + Item item122 = ItemBuilder.createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-item122") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item122") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item122") + .withMetadata("dc", "test", "collection111field", "collection111field-item122") + .withMetadata("dc", "test", "collection121field", "collection121field-item122") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item122") + .withMetadata("dc", "test", "collection211field", "collection211field-item122") + .withMetadata("dc", "test", "collection221field", "collection221field-item122") + .build(); + + Item item211 = ItemBuilder.createItem(context, collection211) + .withMetadata("dc", "contributor", "author", "author-item211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item211") + .withMetadata("dc", "test", "collection111field", "collection111field-item211") + .withMetadata("dc", "test", "collection121field", "collection121field-item211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item211") + .withMetadata("dc", "test", "collection211field", "collection211field-item211") + .withMetadata("dc", "test", "collection221field", "collection221field-item211") + .build(); + + 
Item item212 = ItemBuilder.createItem(context, collection212) + .withMetadata("dc", "contributor", "author", "author-item212") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item212") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item212") + .withMetadata("dc", "test", "collection111field", "collection111field-item212") + .withMetadata("dc", "test", "collection121field", "collection121field-item212") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item212") + .withMetadata("dc", "test", "collection211field", "collection211field-item212") + .withMetadata("dc", "test", "collection221field", "collection221field-item212") + .build(); + + Item item221 = ItemBuilder.createItem(context, collection221) + .withMetadata("dc", "contributor", "author", "author-item221") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item221") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item221") + .withMetadata("dc", "test", "collection111field", "collection111field-item221") + .withMetadata("dc", "test", "collection121field", "collection121field-item221") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item221") + .withMetadata("dc", "test", "collection211field", "collection211field-item221") + .withMetadata("dc", "test", "collection221field", "collection221field-item221") + .build(); + + Item item222 = ItemBuilder.createItem(context, collection222) + .withMetadata("dc", "contributor", "author", "author-item222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item222") + .withMetadata("dc", "test", "collection111field", "collection111field-item222") + .withMetadata("dc", "test", "collection121field", "collection121field-item222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item222") + .withMetadata("dc", "test", "collection211field", "collection211field-item222") + .withMetadata("dc", "test", "collection221field", "collection221field-item222") + .build(); + + Item mappedItem111222 = ItemBuilder + .createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-mappedItem111222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem111222") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem111222") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem111222") + .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem111222") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem111222") + .build(); + + + Item mappedItem122211 = ItemBuilder + .createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-mappedItem122211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem122211") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem122211") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem122211") + .withMetadata("dc", "test", 
"subcommunity21field", "subcommunity21field-mappedItem122211") + .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem122211") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem122211") + .build(); + + + collectionService.addItem(context, collection222, mappedItem111222); + collectionService.addItem(context, collection211, mappedItem122211); + + + context.dispatchEvents(); + context.restoreAuthSystemState(); + } + + @Test + /** + * Verify that the custom configuration "discovery-parent-community-1" is correctly used for Parent Community 1. + */ + public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(parentCommunity1.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item111", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item112", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem111222", + 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + + + } + + @Test + /** + * Verify that the custom configuration "discovery-sub-community-1-1" is correctly used for Subcommunity 11. + */ + public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(subcommunity11.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item111", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-mappedItem111222", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-1-1-1" is correctly used for Collection 111. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection111field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection111field") + .param("scope", String.valueOf(collection111.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection111field", + "collection111field-item111", 1), + FacetValueMatcher.matchEntry("collection111field", + "collection111field-mappedItem111222", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-1-1" is inherited + * correctly for Collection 112. + */ + public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(collection112.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Subcommunity 12. + */ + public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(subcommunity12.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-1-2-1" is correctly used for Collection 121. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection121.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection121field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection121field") + .param("scope", String.valueOf(collection121.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection121field", + "collection121field-item121", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Collection 122. + */ + public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(collection122.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Parent Community 2. + */ + public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + /** + * Verify that the custom configuration "discovery-sub-community-2-1" is correctly used for Subcommunity 21. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(subcommunity21.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item211", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-2-1-1" is correctly used for Collection 211. + */ + public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection211field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection211field") + .param("scope", String.valueOf(collection211.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection211field", + "collection211field-item211", 1), + FacetValueMatcher.matchEntry("collection211field", + "collection211field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-2-1" is inherited + * correctly for Collection 212. + */ + public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(collection212.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Subcommunity 22. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-2-2-1" is correctly used for Collection 221. + */ + public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection221field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection221field") + .param("scope", String.valueOf(collection221.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection221field", + "collection221field-item221", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Collection 222. + */ + public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java index 55e82831f3d1..1fd9e81ca88d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java @@ -33,6 +33,7 @@ import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Collection; import org.dspace.core.Constants; import org.hamcrest.Matchers; @@ -243,6 +244,35 @@ public void patchTemplateItem() throws Exception { ))))); } + /* Similar to patchTemplateItem(), except it is for collection admin, not repository admin. Test case was simplified, since it does not do anything else.
+ */ + @Test + public void patchTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(patch(getTemplateItemUrlTemplate(itemId)) + .content(patchBody) + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + + getClient(collAdminToken).perform(get(getCollectionTemplateItemUrlTemplate(childCollection.getID().toString()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + } + @Test public void patchIllegalInArchiveTemplateItem() throws Exception { setupTestTemplate(); @@ -337,6 +367,22 @@ public void deleteTemplateItem() throws Exception { .andExpect(status().isNoContent()); } + /*Similar to deleteTemplateItem(), except it is for collection admin, not repository admin + */ + @Test + public void deleteTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(delete(getTemplateItemUrlTemplate(itemId))) + .andExpect(status().isNoContent()); + } + @Test public void deleteTemplateItemNoRights() throws Exception { setupTestTemplate(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java index f1a1a095b16e..72508a0dad58 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java @@ -88,7 +88,7 @@ public void createSuccess() throws Exception { context.turnOffAuthorisationSystem(); MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") - .build(); + .build(); context.restoreAuthSystemState(); MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); @@ -116,6 +116,41 @@ public void createSuccess() throws Exception { } } + @Test + public void createUnprocessableEntity_prefixContainingInvalidCharacters() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") + .build(); + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); + metadataSchemaRest.setPrefix("test.SchemaName"); + metadataSchemaRest.setNamespace(TEST_NAMESPACE); + + String authToken = getAuthToken(admin.getEmail(), password); + + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test,SchemaName"); + getClient(authToken) + .perform(post("/api/core/metadataschemas") + 
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test SchemaName"); + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedTest() throws Exception { @@ -202,7 +237,7 @@ public void update() throws Exception { MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); metadataSchemaRest.setId(metadataSchema.getID()); - metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setPrefix(TEST_NAME); metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) @@ -214,7 +249,33 @@ public void update() throws Exception { getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", MetadataschemaMatcher - .matchEntry(TEST_NAME_UPDATED, TEST_NAMESPACE_UPDATED))); + .matchEntry(TEST_NAME, TEST_NAMESPACE_UPDATED))); + } + + @Test + public void update_schemaNameShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE) + .build(); + + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); + metadataSchemaRest.setId(metadataSchema.getID()); + metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadataschemas/" + metadataSchema.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataschemaMatcher + .matchEntry(TEST_NAME, TEST_NAMESPACE))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java index 70b76e1afd6d..a615c58c11ae 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java @@ -9,6 +9,7 @@ import static com.jayway.jsonpath.JsonPath.read; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -49,12 +50,12 @@ */ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegrationTest { - private static final String ELEMENT = "test element"; - private static final String QUALIFIER = "test qualifier"; + private static final String ELEMENT = "test_element"; + private static final String QUALIFIER = "test_qualifier"; private static final String SCOPE_NOTE = "test scope_note"; - private static final String ELEMENT_UPDATED = "test element updated"; - private static final String QUALIFIER_UPDATED = "test qualifier updated"; + private static final String ELEMENT_UPDATED = "test_element_updated"; + private 
static final String QUALIFIER_UPDATED = "test_qualifier_updated"; private static final String SCOPE_NOTE_UPDATED = "test scope_note updated"; private MetadataSchema metadataSchema; @@ -564,6 +565,70 @@ public void findByFieldName_exactName_combinedDiscoveryQueryParams_qualifier() t .andExpect(status().isUnprocessableEntity()); } + @Test + public void findByFieldName_sortByFieldNameASC() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + "http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + .param("sort", "fieldName,ASC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + MetadataFieldMatcher.matchMetadataField(metadataField2), + MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField1) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + + @Test + public void findByFieldName_sortByFieldNameDESC() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + "http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + .param("sort", "fieldName,DESC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + MetadataFieldMatcher.matchMetadataField(metadataField1), + MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField2) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + @Test public void createSuccess() throws Exception { @@ -575,7 +640,8 @@ public void createSuccess() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -606,7 +672,8 @@ public void createBlankQualifier() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); Integer id = null; try { - 
assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, null), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + null), nullValue()); id = read( getClient(authToken) @@ -641,7 +708,8 @@ public void create_checkAddedToIndex() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -689,6 +757,94 @@ public void createUnauthorized() throws Exception { .andExpect(status().isUnauthorized()); } + @Test + public void createUnprocessableEntity_elementContainingInvalidCharacters() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement("testElement.ForCreate"); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void createUnprocessableEntity_qualifierContainingInvalidCharacters() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier("testQualifier.ForCreate"); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new 
ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedEPersonNoAdminRights() throws Exception { @@ -832,31 +988,81 @@ public void update() throws Exception { context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) - .build(); + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isOk()); + } + + @Test + public void update_elementShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); context.restoreAuthSystemState(); MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); metadataFieldRest.setId(metadataField.getID()); metadataFieldRest.setElement(ELEMENT_UPDATED); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) + )); + } + + @Test + public void update_qualifierShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); 
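+ // changing the qualifier of an existing metadata field is not allowed, so the PUT below should return 422 Unprocessable Entity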
metadataFieldRest.setQualifier(QUALIFIER_UPDATED); metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) .perform(put("/api/core/metadatafields/" + metadataField.getID()) - .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) - .contentType(contentType)) - .andExpect(status().isOk()); + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( - metadataSchema.getName(), ELEMENT_UPDATED, QUALIFIER_UPDATED) - )); + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) + )); } @Test - public void update_checkUpdatedInIndex() throws Exception { + public void update_checkNotUpdatedInIndex() throws Exception { context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) @@ -885,27 +1091,27 @@ public void update_checkUpdatedInIndex() throws Exception { .perform(put("/api/core/metadatafields/" + metadataField.getID()) .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) .contentType(contentType)) - .andExpect(status().isOk()); + .andExpect(status().isUnprocessableEntity()); - // new metadata field found in index + // new metadata field not found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) .param("element", ELEMENT_UPDATED) .param("qualifier", QUALIFIER_UPDATED)) .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( - MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), - ELEMENT_UPDATED, QUALIFIER_UPDATED)) - )) - .andExpect(jsonPath("$.page.totalElements", is(1))); + .andExpect(jsonPath("$.page.totalElements", is(0))); - // original metadata field not found in index + // original metadata field found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) .param("element", metadataField.getElement()) .param("qualifier", metadataField.getQualifier())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.page.totalElements", is(0))); + .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( + MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), + ELEMENT, QUALIFIER)) + )) + .andExpect(jsonPath("$.page.totalElements", is(1))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java new file mode 100644 index 000000000000..b5c67c640fff --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PrimaryBitstreamControllerIT.java @@ -0,0 +1,624 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.InputStream; +import java.util.UUID; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; +import org.dspace.app.rest.matcher.BitstreamMatcher; +import org.dspace.app.rest.matcher.BundleMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.BundleBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Tests for the PrimaryBitstreamController + */ +public class PrimaryBitstreamControllerIT extends AbstractControllerIntegrationTest { + + @Autowired + BundleService bundleService; + @Autowired + BitstreamService bitstreamService; + + Item item; + Bitstream bitstream; + Bundle bundle; + Community community; + Collection collection; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + community = CommunityBuilder.createCommunity(context).build(); + collection = CollectionBuilder.createCollection(context, community).build(); + item = ItemBuilder.createItem(context, collection).build(); + + // create bitstream in ORIGINAL bundle of item + String bitstreamContent = "TEST CONTENT"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + bundle = item.getBundles("ORIGINAL").get(0); + context.restoreAuthSystemState(); + } + + @Test + public void testGetPrimaryBitstream() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BitstreamMatcher.matchProperties(bitstream))); + } + + @Test + public void testGetPrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get(getBundlePrimaryBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isNotFound()); + } + + @Test + public void testGetPrimaryBitstreamNonExisting() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isNoContent()) + .andExpect(jsonPath("$").doesNotExist()); + } + + @Test + public void testPostPrimaryBitstream() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + 
.content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle.getName(), bundle.getID(), + bundle.getHandle(), bundle.getType()))); + // verify primaryBitstream was actually added + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(UUID.randomUUID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isNotFound()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamInvalidBitstream() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamAlreadyExists() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isBadRequest()); + // verify primaryBitstream is still the original one + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamNotInBundle() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle2 = BundleBuilder.createBundle(context, item).withName("Bundle2").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testPostPrimaryBitstreamCommunityAdmin() throws Exception { + // create new structure with Admin permissions on Community + context.turnOffAuthorisationSystem(); + Community com2 = CommunityBuilder.createCommunity(context).withAdminGroup(eperson).build(); + Collection col2 = CollectionBuilder.createCollection(context, com2).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + 
.content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually added + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + + // verify Community Admin can't set a primaryBitstream outside their own Community + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testPostPrimaryBitstreamCollectionAdmin() throws Exception { + // create new structure with Admin permissions on Collection + context.turnOffAuthorisationSystem(); + Collection col2 = CollectionBuilder.createCollection(context, community).withAdminGroup(eperson).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually added + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + + // verify Collection Admin can't set a primaryBitstream outside their own Collection + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testPostPrimaryBitstreamItemAdmin() throws Exception { + // create new structure with Admin permissions on Item + context.turnOffAuthorisationSystem(); + Item item2 = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually added + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream2, bundle2.getPrimaryBitstream()); + + // verify Item Admin can't set a primaryBitstream outside their own Item + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testPostPrimaryBitstreamForbidden() throws Exception { + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) 
+ .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testPostPrimaryBitstreamUnauthenticated() throws Exception { + getClient().perform(post(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isUnauthorized()); + } + + @Test + public void testUpdatePrimaryBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle.getName(), bundle.getID(), + bundle.getHandle(), bundle.getType()))); + // verify primaryBitstream was actually updated + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream2, bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(UUID.randomUUID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isNotFound()); + } + + @Test + public void testUpdatePrimaryBitstreamInvalidBitstream() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still the original one + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamNonExisting() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isBadRequest()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamNotInBundle() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bundle bundle2 = BundleBuilder.createBundle(context, item).withName("Bundle2").build(); + Bitstream bitstream2 = createBitstream(bundle2); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isUnprocessableEntity()); + // verify primaryBitstream is still the original one + bundle = context.reloadEntity(bundle); + Assert.assertEquals(bitstream, bundle.getPrimaryBitstream()); + } + + @Test + public void testUpdatePrimaryBitstreamCommunityAdmin() throws Exception { + // create new structure with Admin permissions on Community + 
context.turnOffAuthorisationSystem(); + Community com2 = CommunityBuilder.createCommunity(context).withAdminGroup(eperson).build(); + Collection col2 = CollectionBuilder.createCollection(context, com2).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + Bitstream bitstream3 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream3.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually updated + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Community Admin can't update a primaryBitstream outside their own Community + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testUpdatePrimaryBitstreamCollectionAdmin() throws Exception { + // create new structure with Admin permissions on Collection + context.turnOffAuthorisationSystem(); + Collection col2 = CollectionBuilder.createCollection(context, community).withAdminGroup(eperson).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + Bitstream bitstream3 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream3.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually updated + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Collection Admin can't update a primaryBitstream outside their own Collection + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testUpdatePrimaryBitstreamItemAdmin() throws Exception { + // create new structure with Admin permissions on Item + context.turnOffAuthorisationSystem(); + Item item2 = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + Bitstream bitstream3 = createBitstream(bundle2); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), 
password); + + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle2.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream3.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", BundleMatcher.matchProperties(bundle2.getName(), bundle2.getID(), + bundle2.getHandle(), bundle2.getType()))); + // verify primaryBitstream was actually updated + bundle2 = context.reloadEntity(bundle2); + Assert.assertEquals(bitstream3, bundle2.getPrimaryBitstream()); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Item Admin can't update a primaryBitstream outside their own Item + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testUpdatePrimaryBitstreamForbidden() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testUpdatePrimaryBitstreamUnauthenticated() throws Exception { + context.turnOffAuthorisationSystem(); + bundle.setPrimaryBitstreamID(bitstream); + Bitstream bitstream2 = createBitstream(bundle); + context.restoreAuthSystemState(); + + getClient().perform(put(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream2.getID()))) + .andExpect(status().isUnauthorized()); + } + + @Test + public void testDeletePrimaryBitstream() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle.getBitstreams().size()); + Assert.assertEquals(bitstream, bundle.getBitstreams().get(0)); + } + + @Test + public void testDeletePrimaryBitstreamBundleNotFound() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(UUID.randomUUID()))) + .andExpect(status().isNotFound()); + } + + @Test + public void testDeletePrimaryBitstreamBundleNonExisting() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isBadRequest()); + // verify primaryBitstream is still null + bundle = context.reloadEntity(bundle); + Assert.assertNull(bundle.getPrimaryBitstream()); + } + + @Test + public void testDeletePrimaryBitstreamCommunityAdmin() throws Exception { + // create new structure with Admin permissions on Community + context.turnOffAuthorisationSystem(); + Community com2 = CommunityBuilder.createCommunity(context).withAdminGroup(eperson).build(); + Collection col2 = CollectionBuilder.createCollection(context, com2).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = 
BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle2.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle2 = context.reloadEntity(bundle2); + Assert.assertNull(bundle2.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle2.getBitstreams().size()); + Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Community Admin can't delete a primaryBitstream outside their own Community + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testDeletePrimaryBitstreamCollectionAdmin() throws Exception { + // create new structure with Admin permissions on Collection + context.turnOffAuthorisationSystem(); + Collection col2 = CollectionBuilder.createCollection(context, community).withAdminGroup(eperson).build(); + Item item2 = ItemBuilder.createItem(context, col2).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle2.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle2 = context.reloadEntity(bundle2); + Assert.assertNull(bundle2.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle2.getBitstreams().size()); + Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Collection Admin can't delete a primaryBitstream outside their own Collection + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testDeletePrimaryBitstreamItemAdmin() throws Exception { + // create new structure with Admin permissions on Item + context.turnOffAuthorisationSystem(); + Item item2 = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + Bundle bundle2 = BundleBuilder.createBundle(context, item2).withName("ORIGINAL").build(); + Bitstream bitstream2 = createBitstream(bundle2); + bundle2.setPrimaryBitstreamID(bitstream2); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle2.getID()))) + .andExpect(status().isNoContent()); + // verify primaryBitstream was actually deleted + bundle2 = context.reloadEntity(bundle2); + Assert.assertNull(bundle2.getPrimaryBitstream()); + // verify bitstream itself still exists + Assert.assertEquals(1, bundle2.getBitstreams().size()); + Assert.assertEquals(bitstream2, bundle2.getBitstreams().get(0)); + + bundle.setPrimaryBitstreamID(bitstream); + // verify Item Admin can't delete a primaryBitstream outside 
their own Item + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID())) + .contentType(textUriContentType) + .content(getBitstreamUrl(bitstream.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testDeletePrimaryBitstreamForbidden() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isForbidden()); + } + + @Test + public void testDeletePrimaryBitstreamUnauthenticated() throws Exception { + bundle.setPrimaryBitstreamID(bitstream); + + getClient().perform(delete(getBundlePrimaryBitstreamUrl(bundle.getID()))) + .andExpect(status().isUnauthorized()); + } + + private String getBundlePrimaryBitstreamUrl(UUID uuid) { + return "/api/core/bundles/" + uuid + "/primaryBitstream"; + } + + private String getBitstreamUrl(UUID uuid) { + return "/api/core/bitstreams/" + uuid; + } + + private Bitstream createBitstream(Bundle bundle) throws Exception { + String bitstreamContent = "Bitstream Content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + return BitstreamBuilder.createBitstream(context, bundle, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java index 6cf4d58df799..305f8aac7fe8 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java @@ -243,22 +243,35 @@ public void getAllProcessesTestStartingUser() throws Exception { @Test public void getProcessFiles() throws Exception { + context.setCurrentUser(eperson); Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); - try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - + getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) 
+ .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + getClient(epersonToken) + .perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); } @Test @@ -266,25 +279,34 @@ public void getProcessFilesByFileType() throws Exception { Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files/inputfile")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - } @Test public void getProcessFilesTypes() throws Exception { + Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } List fileTypesToCheck = new LinkedList<>(); @@ -292,12 +314,18 @@ public void getProcessFilesTypes() throws Exception { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/filetypes")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) .andExpect(status().isOk()) .andExpect(jsonPath("$", ProcessFileTypesMatcher - .matchProcessFileTypes("filetypes-" + process.getID(), fileTypesToCheck))); - + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ProcessFileTypesMatcher + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); } @Test @@ -806,30 +834,44 @@ public void 
searchProcessTestByUserSortedOnNonExistingIsSortedAsDefault() throws .andExpect(status().isBadRequest()); } - /** - * Test get process output by admin created by himself - */ @Test public void getProcessOutput() throws Exception { + context.setCurrentUser(eperson); + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendLog(process.getID(), process.getName(), "testlog", ProcessLogLevel.INFO); + processService.appendLog(process1.getID(), process1.getName(), "testlog", ProcessLogLevel.INFO); } - processService.createLogBitstream(context, process); + processService.createLogBitstream(context, process1); List fileTypesToCheck = new LinkedList<>(); fileTypesToCheck.add("inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/output")) + getClient(token).perform(get("/api/system/processes/" + process1.getID() + "/output")) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.type", is("bitstream"))) .andExpect(jsonPath("$.metadata['dc.title'][0].value", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", is("script_output"))); + String epersonToken = getAuthToken(eperson.getEmail(), password); + + getClient(epersonToken) + .perform(get("/api/system/processes/" + process1.getID() + "/output")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.name", + is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.type", is("bitstream"))) + .andExpect(jsonPath("$.metadata['dc.title'][0].value", + is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", + is("script_output"))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java new file mode 100644 index 000000000000..3b39d251216c --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -0,0 +1,213 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl; +import org.junit.Test; +import 
org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link PubmedImportMetadataSourceServiceImpl} + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private PubmedImportMetadataSourceServiceImpl pubmedImportMetadataServiceImpl; + + @Autowired + private LiveImportClientImpl liveImportClientImpl; + + @Test + public void pubmedImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void pubmedImportMetadataGetRecords2Test() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test2.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test2.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords2(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, + "Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review."); + MetadatumDTO description1 = createMetadatumDTO("dc", "description", "abstract", "To report and synthesize" + + " the main strategies for teaching clinical reasoning described in the literature in the context 
of" + + " advanced clinical practice and promote new areas of research to improve the pedagogical approach" + + " to clinical reasoning in Advanced Practice Nursing."); + MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", "Clinical reasoning and" + + " clinical thinking are essential elements in the advanced nursing clinical practice decision-making" + + " process. The quality improvement of care is related to the development of those skills." + + " Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical" + + " reasoning in advanced clinical practice."); + MetadatumDTO description3 = createMetadatumDTO("dc", "description", "abstract", "A scoping review was" + + " conducted using the framework developed by Arksey and O'Malley as a research strategy." + + " Consistent with the nature of scoping reviews, a study protocol has been established."); + MetadatumDTO description4 = createMetadatumDTO("dc", "description", "abstract", "The studies included and" + + " analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary" + + " revision studies, published in biomedical databases, were selected, including qualitative ones." + + " Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID." + + " Three authors independently evaluated the articles for titles, abstracts, and full text."); + MetadatumDTO description5 = createMetadatumDTO("dc", "description", "abstract", "1433 articles were examined," + + " applying the eligibility and exclusion criteria 73 studies were assessed for eligibility," + + " and 27 were included in the scoping review. The results that emerged from the review were" + + " interpreted and grouped into three macro strategies (simulations-based education, art and visual" + + " thinking, and other learning approaches) and nineteen educational interventions."); + MetadatumDTO description6 = createMetadatumDTO("dc", "description", "abstract", "Among the different" + + " strategies, the simulations are the most used. Despite this, our scoping review reveals that is" + + " necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic" + + " reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to" + + " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to" + + " acquire an adequate level of judgment and critical thinking. 
Therefore, it will be" + + " necessary to relate teaching methodologies with the skills developed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "36708638"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana"); + MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola"); + MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Prandi, Cesarina"); + MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Baldon, Alessia"); + MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Bianchi, Monica"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2023-02"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + MetadatumDTO subject1 = createMetadatumDTO("dc", "subject", null, "Advanced practice nursing"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Clinical reasoning"); + MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "Critical thinking"); + MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies"); + MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education"); + MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology"); + + metadatums.add(title); + metadatums.add(description1); + metadatums.add(description2); + metadatums.add(description3); + metadatums.add(description4); + metadatums.add(description5); + metadatums.add(description6); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(author3); + metadatums.add(author4); + metadatums.add(author5); + metadatums.add(author6); + metadatums.add(date); + metadatums.add(language); + metadatums.add(subject1); + metadatums.add(subject2); + metadatums.add(subject3); + metadatums.add(subject4); + metadatums.add(subject5); + metadatums.add(subject6); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + + private ArrayList getRecords2() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, "Searching NCBI Databases Using Entrez."); + MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", "One of the most widely" + + " used interfaces for the retrieval of information from biological databases is the NCBI Entrez" + + " system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between" + + " the individual entries found in numerous public databases. The existence of such natural" + + " connections, mostly biological in nature, argued for the development of a method through which" + + " all the information about a particular biological entity could be found without having to" + + " sequentially visit and query disparate databases. Two basic protocols describe simple, text-based" + + " searches, illustrating the types of information that can be retrieved through the Entrez system." + + " An alternate protocol builds upon the first basic protocol, using additional," + + " built-in features of the Entrez system, and providing alternative ways to issue the initial query." + + " The support protocol reviews how to save frequently issued queries. 
Finally, Cn3D, a structure" + + " visualization tool, is also discussed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "21975942"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + + metadatums.add(title); + metadatums.add(description); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(date); + metadatums.add(language); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java index 93d963db2c8f..4cdbb6c11061 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java @@ -272,7 +272,7 @@ public void testRegisterDomainNotRegistered() throws Exception { } @Test - public void testRegisterDomainNotRegisteredMailAddressRegistred() throws Exception { + public void testRegisterMailAddressRegistered() throws Exception { List registrationDataList = registrationDataDAO.findAll(context, RegistrationData.class); try { context.turnOffAuthorisationSystem(); @@ -282,7 +282,7 @@ public void testRegisterDomainNotRegisteredMailAddressRegistred() throws Excepti .withCanLogin(true) .build(); context.restoreAuthSystemState(); - configurationService.setProperty("authentication-password.domain.valid", "test.com"); + RegistrationRest registrationRest = new RegistrationRest(); registrationRest.setEmail(email); @@ -291,9 +291,10 @@ public void testRegisterDomainNotRegisteredMailAddressRegistred() throws Excepti .param(TYPE_QUERY_PARAM, TYPE_REGISTER) .content(mapper.writeValueAsBytes(registrationRest)) .contentType(contentType)) - .andExpect(status().isUnprocessableEntity()); + .andExpect(status().isCreated()); registrationDataList = registrationDataDAO.findAll(context, RegistrationData.class); - assertEquals(0, registrationDataList.size()); + assertEquals(1, registrationDataList.size()); + assertTrue(StringUtils.equalsIgnoreCase(registrationDataList.get(0).getEmail(), email)); } finally { Iterator iterator = registrationDataList.iterator(); while (iterator.hasNext()) { @@ -344,6 +345,7 @@ public void registrationFlowWithNoHeaderCaptchaTokenTest() throws Exception { // when reCAPTCHA enabled and request doesn't contain "X-Recaptcha-Token” header getClient().perform(post("/api/eperson/registrations") + .param(TYPE_QUERY_PARAM, TYPE_REGISTER) .content(mapper.writeValueAsBytes(registrationRest)) .contentType(contentType)) .andExpect(status().isForbidden()); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index da48a5adf43b..cbaca4707b13 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -8,14 +8,12 @@ package org.dspace.app.rest; import static 
com.jayway.jsonpath.JsonPath.read; -import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.exparity.hamcrest.date.DateMatchers.within; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.text.IsEmptyString.emptyOrNullString; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; @@ -34,6 +32,7 @@ import java.sql.SQLException; import java.time.temporal.ChronoUnit; import java.util.Date; +import java.util.Iterator; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; @@ -221,33 +220,34 @@ public void testCreateAndReturnAuthenticated() // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); String authToken = getAuthToken(eperson.getEmail(), password); - AtomicReference requestTokenRef = new AtomicReference<>(); try { - getClient(authToken) - .perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(eperson.getEmail())), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(eperson.getFullName())), - hasJsonPath("$.allfiles", is(true)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); + getClient(authToken) + .perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(eperson.getEmail())) { + // Verify request data + assertEquals(eperson.getFullName(), requestItem.getReqName()); + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(true, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); } - } +} /** * Test of createAndReturn method, with an UNauthenticated user. @@ -273,30 +273,32 @@ public void testCreateAndReturnNotAuthenticated() // Create it and see if it was created correctly. 
ObjectMapper mapper = new ObjectMapper(); - AtomicReference requestTokenRef = new AtomicReference<>(); try { - getClient().perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(RequestItemBuilder.REQ_EMAIL)), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(RequestItemBuilder.REQ_NAME)), - hasJsonPath("$.allfiles", is(false)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); + getClient().perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(RequestItemBuilder.REQ_EMAIL)) { + // Verify request data + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(RequestItemBuilder.REQ_NAME, requestItem.getReqName()); + assertEquals(bitstream.getID(), requestItem.getBitstream().getID()); + assertEquals(false, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 4c6cfae1334a..19133e17421a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -15,6 +15,7 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; @@ -66,6 +67,8 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.ProcessStatus; +import org.dspace.content.service.BitstreamService; +import org.dspace.eperson.EPerson; import org.dspace.content.authority.DCInputAuthority; import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.content.authority.service.MetadataAuthorityService; @@ -321,14 +324,72 @@ public void findAllScriptsSortedAlphabeticallyTest() throws Exception { } @Test - public void findAllScriptsWithNoAdminTest() throws Exception { + public void 
findAllScriptsGenericLoggedInUserTest() throws Exception { String token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(get("/api/system/scripts")) .andExpect(status().isOk()) - .andExpect(jsonPath("$.page", - is(PageMatcher.pageEntryWithTotalPagesAndElements(0, 20, 1, 4)))); + .andExpect(jsonPath("$.page.totalElements", is(0))); + } + + @Test + public void findAllScriptsAnonymousUserTest() throws Exception { + // this should be changed once we allow anonymous user to execute some scripts + getClient().perform(get("/api/system/scripts")) + .andExpect(status().isUnauthorized()); + } + @Test + public void findAllScriptsLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + // the local admins have at least access to the curate script + // and not access to process-cleaner script + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + getClient(colAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(itemAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); } @Test @@ -422,6 +483,63 @@ public void findOneScriptByNameTest() throws Exception { )); } + @Test + public void findOneScriptByNameLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + 
.withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(colAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(itemAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + } + + @Test + public void findOneScriptByNameNotAuthenticatedTest() throws Exception { + getClient().perform(get("/api/system/scripts/mock-script")) + .andExpect(status().isUnauthorized()); + } + @Test public void findOneScriptByNameTestAccessDenied() throws Exception { String token = getAuthToken(eperson.getEmail(), password); @@ -436,12 +554,48 @@ public void findOneScriptByInvalidNameBadRequestExceptionTest() throws Exception .andExpect(status().isBadRequest()); } + /** + * This test will create a basic structure of communities, collections and items with some local admins at each + * level and verify that the local admins, nor generic users can run scripts reserved to administrator + * (i.e. 
default ones that do not override the default + * {@link ScriptConfiguration#isAllowedToExecute(org.dspace.core.Context, List)} method implementation). + */ @Test public void postProcessNonAdminAuthorizeException() throws Exception { - String token = getAuthToken(eperson.getEmail(), password); + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + String comAdmin_token = getAuthToken(comAdmin.getEmail(), password); + String colAdmin_token = getAuthToken(colAdmin.getEmail(), password); + String itemAdmin_token = getAuthToken(itemAdmin.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script/processes")) .andExpect(status().isForbidden()); + getClient(comAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(colAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(itemAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); } @Test @@ -475,16 +629,6 @@ public void postProcessAdminWrongOptionsException() throws Exception { @Test public void postProcessAdminNoOptionsFailedStatus() throws Exception { -// List list = new LinkedList<>(); -// -// ParameterValueRest parameterValueRest = new ParameterValueRest(); -// parameterValueRest.setName("-z"); -// parameterValueRest.setValue("test"); -// ParameterValueRest parameterValueRest1 = new ParameterValueRest(); -// parameterValueRest1.setName("-q"); -// list.add(parameterValueRest); -// list.add(parameterValueRest1); - LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-z", "test")); @@ -520,7 +664,7 @@ public void postProcessNonExistingScriptNameException() throws Exception { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -631,12 +775,19 @@ public void postProcessAndVerifyOutput() throws Exception { } + + + @Test public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception { String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(post("/api/system/scripts/mock-script/processes")) + .andExpect(status().isBadRequest()); + getClient(token).perform(post("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -1412,6 +1563,7 @@ private void checkExportOutput( @Override @After public void destroy() 
throws Exception { + context.turnOffAuthorisationSystem(); CollectionUtils.emptyIfNull(processService.findAll(context)).stream().forEach(process -> { try { processService.delete(context, process); @@ -1419,6 +1571,7 @@ public void destroy() throws Exception { throw new RuntimeException(e); } }); + context.restoreAuthSystemState(); super.destroy(); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java index bd40cfdc9dd8..978d8feb58b9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java @@ -411,4 +411,114 @@ public void postTestSuccesEmptyQuery() throws Exception { .andExpect(status().isCreated()); } + + @Test + public void postTestWithClickedObjectSuccess() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(col1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setClickedObject(publicItem1.getID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + } + + @Test + public void postTestWithClickedObjectNotExisting() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. 
+ parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(col1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setClickedObject(UUID.randomUUID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isBadRequest()); + + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java index a0343d67e93d..9af95845d407 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java @@ -9,7 +9,6 @@ import static com.jayway.jsonpath.JsonPath.read; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; -import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java index 9236e7c4ce25..b144cab40c5f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java @@ -761,22 +761,46 @@ public void visibilityTest() throws Exception { getClient(tokenAdmin).perform(get("/api/config/submissionforms/testVisibility")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$.id", is("testVisibility"))) - .andExpect(jsonPath("$.name", is("testVisibility"))) - .andExpect(jsonPath("$.type", is("submissionform"))) - .andExpect(jsonPath("$.rows[0].fields", contains( - matchFormWithoutVisibility("Title"), - matchFormWithVisibility("Date of Issue", - 
Map.of("submission", "read-only", "workflow", "hidden", "edit", "hidden")), - matchFormWithVisibility("Type", Map.of("workflow", "hidden", "edit", "hidden")), - matchFormWithVisibility("Language", - Map.of("submission", "read-only", "workflow", "read-only", "edit", "read-only")), - matchFormWithVisibility("Author(s)", Map.of("workflow", "read-only", "edit", "read-only")), - matchFormWithVisibility("Editor(s)", - Map.of("submission", "read-only", "workflow", "hidden", "edit", "hidden")), - matchFormWithVisibility("Subject(s)", - Map.of("submission", "hidden", "workflow", "read-only", "edit", "read-only")), - matchFormWithVisibility("Description", Map.of("submission", "hidden")) - ))); + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpagetwo"))) + .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("sampleauthority"))) + .andExpect(jsonPath("$._links.first.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=0"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=2"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.self.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.last.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) + .andExpect(jsonPath("$.page.number", is(3))); + + getClient(tokenAdmin).perform(get("/api/config/submissionforms") + .param("size", "2") + .param("page", "4")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpageone"))) + .andExpect(jsonPath("$._links.first.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=0"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.self.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.last.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) + .andExpect(jsonPath("$.page.number", is(4))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ViewEventRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ViewEventRestRepositoryIT.java index 5683bd30a84e..d49a4ce857d4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ViewEventRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ViewEventRestRepositoryIT.java @@ -7,6 +7,7 @@ */ package 
org.dspace.app.rest; +import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -17,6 +18,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocumentList; import org.dspace.app.rest.model.ViewEventRest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.builder.BitstreamBuilder; @@ -29,10 +33,14 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.Site; +import org.dspace.statistics.SolrStatisticsCore; +import org.dspace.utils.DSpace; import org.junit.Test; public class ViewEventRestRepositoryIT extends AbstractControllerIntegrationTest { + private final SolrStatisticsCore solrStatisticsCore = new DSpace().getSingletonService(SolrStatisticsCore.class); + @Test public void findAllTestThrowNotImplementedException() throws Exception { @@ -494,5 +502,52 @@ public void postTestAuthenticatedUserSuccess() throws Exception { } + @Test + public void postTestReferrer() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. 
Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("item"); + viewEventRest.setTargetId(publicItem1.getID()); + viewEventRest.setReferrer("test-referrer"); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + solrStatisticsCore.getSolr().commit(); + + // Query all statistics and verify it contains a document with the correct referrer + SolrQuery solrQuery = new SolrQuery("*:*"); + QueryResponse queryResponse = solrStatisticsCore.getSolr().query(solrQuery); + SolrDocumentList responseList = queryResponse.getResults(); + assertEquals(1, responseList.size()); + assertEquals("test-referrer", responseList.get(0).get("referrer")); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java index d9d2c0fcf708..5cfcbbe0de95 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java @@ -10122,5 +10122,4 @@ public void patchBySupervisorTest() throws Exception { "ExtraEntry") ))); } - } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java index e020c04b1a25..d1679ae1d20b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/eperson/DeleteEPersonSubmitterIT.java @@ -24,9 +24,11 @@ import java.util.concurrent.atomic.AtomicReference; import javax.ws.rs.core.MediaType; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.requestitem.RequestItemAuthor; import org.dspace.app.requestitem.RequestItemAuthorExtractor; +import org.dspace.app.requestitem.RequestItemHelpdeskStrategy; import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; @@ -46,6 +48,7 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; +import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.versioning.Version; import org.dspace.versioning.factory.VersionServiceFactory; @@ -76,7 +79,7 @@ public class DeleteEPersonSubmitterIT extends AbstractControllerIntegrationTest protected RequestItemAuthorExtractor requestItemAuthorExtractor = DSpaceServicesFactory.getInstance() .getServiceManager() - .getServiceByName("org.dspace.app.requestitem.RequestItemAuthorExtractor", + .getServiceByName(RequestItemHelpdeskStrategy.class.getName(), RequestItemAuthorExtractor.class); @@ -85,15 +88,8 @@ public class DeleteEPersonSubmitterIT extends AbstractControllerIntegrationTest 
private EPerson submitterForVersion2; private EPerson workflowUser; - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DeleteEPersonSubmitterIT.class); + private static final Logger log = LogManager.getLogger(); - /** - * This method will be run before every test as per @Before. It will - * initialize resources required for the tests. - * - * Other methods can be annotated with @Before here or in subclasses but no - * execution order is guaranteed - */ @Before @Override public void setUp() throws Exception { @@ -114,8 +110,8 @@ public void setUp() throws Exception { /** - * This test verifies that when the submitter Eperson is deleted, the delete succeeds and the item will have - * 'null' as submitter + * This test verifies that when the submitter Eperson is deleted, the delete + * succeeds and the item will have 'null' as submitter. * * @throws Exception */ @@ -140,12 +136,21 @@ public void testArchivedItemSubmitterDelete() throws Exception { assertNull(retrieveItemSubmitter(installItem.getID())); + // Don't depend on external configuration; set up helpdesk as needed. + final String HELPDESK_EMAIL = "dspace-help@example.com"; + final String HELPDESK_NAME = "Help Desk"; + ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + configurationService.setProperty("mail.helpdesk", HELPDESK_EMAIL); + configurationService.setProperty("mail.helpdesk.name", HELPDESK_NAME); + configurationService.setProperty("request.item.helpdesk.override", "true"); + // Test it. Item item = itemService.find(context, installItem.getID()); List requestItemAuthor = requestItemAuthorExtractor.getRequestItemAuthor(context, item); - assertEquals("Help Desk", requestItemAuthor.get(0).getFullName()); - assertEquals("dspace-help@myu.edu", requestItemAuthor.get(0).getEmail()); + assertEquals(HELPDESK_NAME, requestItemAuthor.get(0).getFullName()); + assertEquals(HELPDESK_EMAIL, requestItemAuthor.get(0).getEmail()); } /** diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java index f52adc5daa5f..3fec8e4158dd 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java @@ -8,6 +8,9 @@ package org.dspace.app.rest.matcher; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; @@ -31,7 +34,8 @@ private BrowseIndexMatcher() { } public static Matcher subjectBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.subject.*")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -44,7 +48,8 
@@ public static Matcher subjectBrowseIndex(final String order) { public static Matcher titleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("title")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -56,7 +61,8 @@ public static Matcher titleBrowseIndex(final String order) { public static Matcher contributorBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -69,7 +75,8 @@ public static Matcher contributorBrowseIndex(final String order) public static Matcher dateIssuedBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.date.issued")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("date")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -154,4 +161,22 @@ public static Matcher eqtitleBrowseIndex(final String order) { hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/eqtitle/items")) ); } + + public static Matcher hierarchicalBrowseIndex(final String vocabulary) { + return allOf( + hasJsonPath("$.metadata", contains("dc.subject")), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), + hasJsonPath("$.facetType", equalToIgnoringCase("subject")), + hasJsonPath("$.vocabulary", equalToIgnoringCase(vocabulary)), + hasJsonPath("$._links.vocabulary.href", + is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))), + hasJsonPath("$._links.items.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/items", vocabulary))), + hasJsonPath("$._links.entries.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/entries", vocabulary))), + hasJsonPath("$._links.self.href", + is(REST_SERVER_URL + String.format("discover/browses/%s", vocabulary))) + ); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java index c1223adf1738..29ee72e6c539 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java @@ -130,6 +130,17 @@ public static Matcher hasContentInOriginalBundleFacet(boolean ha ); } + public static Matcher matchFacet(boolean hasNext, String name, String facetType) { + return allOf( + hasJsonPath("$.name", is(name)), + hasJsonPath("$.facetType", 
is(facetType)), + hasJsonPath("$.facetLimit", any(Integer.class)), + hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)), + hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name)) + ); + } + + /** * Check that a facet over the dc.type exists and match the default configuration * diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java index 012c7f8f3eeb..9d5ae5d3bcf0 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java @@ -61,6 +61,16 @@ public static Matcher entrySubject(String label, int count) { ); } + public static Matcher matchEntry(String facet, String label, int count) { + return allOf( + hasJsonPath("$.label", is(label)), + hasJsonPath("$.type", is("discover")), + hasJsonPath("$.count", is(count)), + hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")), + hasJsonPath("$._links.search.href", containsString("f." + facet + "=" + label + ",equals")) + ); + } + public static Matcher entrySubjectWithAuthority(String label, String authority, int count) { return allOf( diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java new file mode 100644 index 000000000000..24a94a4d4bb7 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CollectionMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunityCollectionLinkRepository} + */ +public class CommunityCollectionLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Collection collection1; + Collection collection2; + Collection collection3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + collection1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + collection2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + collection3 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + 
+ @Test + public void getCollections_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection1), + CollectionMatcher.matchCollection(collection2), + CollectionMatcher.matchCollection(collection3) + ))); + } + + @Test + public void getCollections_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection3), + CollectionMatcher.matchCollection(collection2), + CollectionMatcher.matchCollection(collection1) + ))); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java new file mode 100644 index 000000000000..aa3b1c072187 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CommunityMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunitySubcommunityLinkRepository} + */ +public class CommunitySubcommunityLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Community subCommunity1; + Community subCommunity2; + Community subCommunity3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + subCommunity1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 1") + .build(); + subCommunity2 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 2") + .build(); + subCommunity3 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + + @Test + public void getSubCommunities_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) 
+ .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity1), + CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity3) + ))); + } + + @Test + public void getSubCommunities_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity3), + CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity1) + ))); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java new file mode 100644 index 000000000000..6d1d242cad7f --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/signposting/controller/LinksetRestControllerIT.java @@ -0,0 +1,990 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.signposting.controller; + +import static org.dspace.content.MetadataSchemaEnum.PERSON; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.InputStream; +import java.text.DateFormat; +import java.text.MessageFormat; +import java.text.SimpleDateFormat; +import java.util.Date; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.builder.WorkspaceItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.RelationshipType; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.authority.Choices; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.dspace.content.authority.service.MetadataAuthorityService; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipTypeService; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import 
org.dspace.util.SimpleMapConverter; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +public class LinksetRestControllerIT extends AbstractControllerIntegrationTest { + + private static final String doiPattern = "https://doi.org/{0}"; + private static final String orcidPattern = "http://orcid.org/{0}"; + private static final String doi = "10.1007/978-3-642-35233-1_18"; + private static final String PERSON_ENTITY_TYPE = "Person"; + + private Collection collection; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private MetadataAuthorityService metadataAuthorityService; + + @Autowired + private ChoiceAuthorityService choiceAuthorityService; + + @Autowired + private ItemService itemService; + + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private RelationshipTypeService relationshipTypeService; + + @Autowired + private SimpleMapConverter mapConverterDSpaceToSchemaOrgUri; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + context.restoreAuthSystemState(); + } + + @Test + public void findAllItemsLinksets() throws Exception { + getClient().perform(get("/signposting")) + .andExpect(status().isMethodNotAllowed()); + } + + @Test + public void findOneItemJsonLinksets() throws Exception { + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(2))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString(mimeType))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + 
.andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemJsonLinksetsWithType() throws Exception { + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + String articleUri = mapConverterDSpaceToSchemaOrgUri.getValue("Article"); + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .withType("Article") + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(2))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString(mimeType))) + .andExpect(jsonPath("$.linkset[0].type", + Matchers.hasSize(2))) + .andExpect(jsonPath("$.linkset[0].type[0].href", + Matchers.hasToString("https://schema.org/AboutPage"))) + .andExpect(jsonPath("$.linkset[0].type[1].href", + Matchers.hasToString(articleUri))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemJsonLinksetsWithLicence() throws Exception { + String licenceUrl = "https://exmple.com/licence"; + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata(MetadataSchemaEnum.DC.getName(), "rights", "uri", licenceUrl) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(2))) + .andExpect(jsonPath("$.linkset[0].type[0].href", + Matchers.hasToString("https://schema.org/AboutPage"))) + .andExpect(jsonPath("$.linkset[0].license[0].href", + Matchers.hasToString(licenceUrl))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + 
Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemJsonLinksetsWithBitstreams() throws Exception { + String bitstream1Content = "ThisIsSomeDummyText"; + String bitstream1MimeType = "text/plain"; + String bitstream2Content = "ThisIsSomeAlternativeDummyText"; + String bitstream2MimeType = "application/pdf"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream 1") + .withDescription("description") + .withMimeType(bitstream1MimeType) + .build(); + } + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstream2Content, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream 2") + .withDescription("description") + .withMimeType(bitstream2MimeType) + .build(); + } + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(4))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString(mimeType))) + .andExpect(jsonPath("$.linkset[0].item[0].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[0].item[0].type", + Matchers.hasToString(bitstream1MimeType))) + .andExpect(jsonPath("$.linkset[0].item[1].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream2.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[0].item[1].type", + Matchers.hasToString(bitstream2MimeType))) + .andExpect(jsonPath("$.linkset[0].anchor", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + 
item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].collection[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].collection[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[1].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[2].collection[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[2].collection[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[2].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[2].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[2].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[2].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[2].anchor", + Matchers.hasToString(url + "/bitstreams/" + bitstream2.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[3].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[3].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[3].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemJsonLinksetsWithBitstreamsFromDifferentBundles() throws Exception { + String bitstream1Content = "ThisIsSomeDummyText"; + String bitstream1MimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, item, is, Constants.DEFAULT_BUNDLE_NAME) + .withName("Bitstream 1") + .withDescription("description") + .withMimeType(bitstream1MimeType) + .build(); + } + + try (InputStream is = IOUtils.toInputStream("test", CharEncoding.UTF_8)) { + Bitstream bitstream2 = BitstreamBuilder.createBitstream(context, item, is, "TEXT") + .withName("Bitstream 2") + .withDescription("description") + .withMimeType("application/pdf") + .build(); + } + + try (InputStream is = IOUtils.toInputStream("test", CharEncoding.UTF_8)) { + Bitstream bitstream3 = BitstreamBuilder.createBitstream(context, item, is, "THUMBNAIL") + .withName("Bitstream 3") + .withDescription("description") 
+ .withMimeType("application/pdf") + .build(); + } + + try (InputStream is = IOUtils.toInputStream("test", CharEncoding.UTF_8)) { + Bitstream bitstream4 = BitstreamBuilder.createBitstream(context, item, is, "LICENSE") + .withName("Bitstream 4") + .withDescription("description") + .withMimeType("application/pdf") + .build(); + } + + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.linkset", + Matchers.hasSize(3))) + .andExpect(jsonPath("$.linkset[0].cite-as[0].href", + Matchers.hasToString(url + "/handle/" + item.getHandle()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].describedby[0].type", + Matchers.hasToString(mimeType))) + .andExpect(jsonPath("$.linkset[0].item", + Matchers.hasSize(1))) + .andExpect(jsonPath("$.linkset[0].item[0].href", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[0].item[0].type", + Matchers.hasToString(bitstream1MimeType))) + .andExpect(jsonPath("$.linkset[0].anchor", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[0].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[0].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].collection[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[1].collection[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[1].linkset[0].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString()))) + .andExpect(jsonPath("$.linkset[1].linkset[0].type", + Matchers.hasToString("application/linkset"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].href", + Matchers.hasToString(url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json"))) + .andExpect(jsonPath("$.linkset[1].linkset[1].type", + Matchers.hasToString("application/linkset+json"))) + .andExpect(jsonPath("$.linkset[1].anchor", + Matchers.hasToString(url + "/bitstreams/" + bitstream1.getID() + "/download"))) + .andExpect(jsonPath("$.linkset[2].describes[0].href", + Matchers.hasToString(url + "/entities/publication/" + item.getID()))) + .andExpect(jsonPath("$.linkset[2].describes[0].type", + Matchers.hasToString("text/html"))) + .andExpect(jsonPath("$.linkset[2].anchor", + Matchers.hasToString(url + "/" + signpostingUrl + "/describedby/" + item.getID()))) + .andExpect(header().stringValues("Content-Type", "application/linkset+json;charset=UTF-8")); + } + + @Test + public void findOneItemThatIsInWorkspaceJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, 
collection) + .withTitle("Workspace Item") + .build(); + itemService.addMetadata(context, workspaceItem.getItem(), "dc", "identifier", "doi", Item.ANY, doi); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + workspaceItem.getItem().getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneWithdrawnItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withdrawn() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneEmbargoItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withIssueDate("2017-11-18") + .withEmbargoPeriod("2 week") + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneRestrictedItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withReaderGroup(internalGroup) + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneUnDiscoverableItemJsonLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID() + "/json")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findOneBitstreamJsonLinksets() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + bitstream.getID() + "/json")) + .andExpect(status().isNotFound()); + } + + @Test + public void findOneCollectionJsonLinksets() throws Exception { + getClient().perform(get("/signposting/linksets/" + collection.getID() + "/json")) + .andExpect(status().isNotFound()); + } + + @Test + public void findOneCommunityJsonLinksets() throws Exception { + getClient().perform(get("/signposting/linksets/" + parentCommunity.getID() + "/json")) + .andExpect(status().isNotFound()); + } + + @Test + public void findOneItemLsetLinksets() 
throws Exception { + String bitstream1Content = "ThisIsSomeDummyText"; + String bitstream1MimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .build(); + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstream1Content, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream 1") + .withDescription("description") + .withMimeType(bitstream1MimeType) + .build(); + } + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + String siteAsRelation = "<" + url + "/handle/" + item.getHandle() + "> ; rel=\"cite-as\" ; anchor=\"" + + url + "/entities/publication/" + item.getID() + "\" ,"; + String itemRelation = "<" + url + "/bitstreams/" + bitstream1.getID() + + "/download> ; rel=\"item\" ; " + "type=\"text/plain\" ; anchor=\"" + url + "/entities/publication/" + + item.getID() + "\" ,"; + String typeRelation = " ; rel=\"type\" ; anchor=\"" + + url + "/entities/publication/" + item.getID() + "\" ,"; + String linksetRelation = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "> ; rel=\"linkset\" ; type=\"application/linkset\" ;" + + " anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; + String jsonLinksetRelation = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID().toString() + + "/json> ; rel=\"linkset\" ; type=\"application/linkset+json\" ;" + + " anchor=\"" + url + "/entities/publication/" + item.getID() + "\" ,"; + String describedByRelation = "<" + url + "/" + signpostingUrl + "/describedby/" + item.getID() + + "> ; rel=\"describedby\" ;" + " type=\"" + mimeType + "\" ; anchor=\"" + url + + "/entities/publication/" + item.getID() + "\" ,"; + + String bitstreamCollectionLink = "<" + url + "/entities/publication/" + item.getID() + "> ;" + + " rel=\"collection\" ; type=\"text/html\" ; anchor=\"" + url + "/bitstreams/" + + bitstream1.getID() + "/download\""; + String bitstreamLinksetLink = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID() + "> ; " + + "rel=\"linkset\" ; type=\"application/linkset\" ; " + + "anchor=\"" + url + "/bitstreams/" + bitstream1.getID() + "/download\""; + String bitstreamLinksetJsonLink = "<" + url + "/" + signpostingUrl + "/linksets/" + item.getID() + "/json> ; " + + "rel=\"linkset\" ; type=\"application/linkset+json\" ; " + + "anchor=\"" + url + "/bitstreams/" + bitstream1.getID() + "/download\""; + + String describesMetadataLink = "<" + url + "/entities/publication/" + item.getID() + "> ; " + + "rel=\"describes\" ; type=\"text/html\" ; " + + "anchor=\"" + url + "/" + signpostingUrl + "/describedby/" + item.getID() + "\""; + + getClient().perform(get("/signposting/linksets/" + item.getID())) + .andExpect(content().string(Matchers.containsString(siteAsRelation))) + .andExpect(content().string(Matchers.containsString(itemRelation))) + .andExpect(content().string(Matchers.containsString(typeRelation))) + .andExpect(content().string(Matchers.containsString(linksetRelation))) + .andExpect(content().string(Matchers.containsString(jsonLinksetRelation))) + .andExpect(content().string(Matchers.containsString(describedByRelation))) + .andExpect(content().string(Matchers.containsString(bitstreamCollectionLink))) + 
.andExpect(content().string(Matchers.containsString(bitstreamLinksetLink))) + .andExpect(content().string(Matchers.containsString(bitstreamLinksetJsonLink))) + .andExpect(content().string(Matchers.containsString(describesMetadataLink))) + .andExpect(header().stringValues("Content-Type", "application/linkset;charset=UTF-8")); + } + + @Test + public void findOneUnDiscoverableItemLsetLinksets() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/linksets/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findTypedLinkForItemWithAuthor() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + String orcidValue = "orcidValue"; + + context.turnOffAuthorisationSystem(); + + Collection personCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType(PERSON_ENTITY_TYPE) + .build(); + + Item author = ItemBuilder.createItem(context, personCollection) + .withPersonIdentifierLastName("familyName") + .withPersonIdentifierFirstName("firstName") + .withMetadata(PERSON.getName(), "identifier", "orcid", orcidValue) + .build(); + Item publication = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .withAuthor("John", author.getID().toString(), Choices.CF_ACCEPTED) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, publication, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + + EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType authorEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, PERSON_ENTITY_TYPE).build(); + RelationshipType isAuthorOfPublicationRelationshipType = + RelationshipTypeBuilder.createRelationshipTypeBuilder(context, publicationEntityType, authorEntityType, + "isAuthorOfPublication", "isPublicationOfAuthor", + null, null, null, null).build(); + isAuthorOfPublicationRelationshipType.setTilted(RelationshipType.Tilted.LEFT); + isAuthorOfPublicationRelationshipType = + relationshipTypeService.create(context, isAuthorOfPublicationRelationshipType); + RelationshipBuilder.createRelationshipBuilder(context, publication, author, + isAuthorOfPublicationRelationshipType).build(); + + context.restoreAuthSystemState(); + + String url = configurationService.getProperty("dspace.ui.url"); + String signpostingUrl = configurationService.getProperty("signposting.path"); + String mimeType = "application/vnd.datacite.datacite+xml"; + String dcIdentifierUriMetadataValue = itemService + .getMetadataFirstValue(publication, "dc", "identifier", "uri", Item.ANY); + + getClient().perform(get("/signposting/links/" + publication.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(7))) + .andExpect(jsonPath("$[?(@.href == '" + MessageFormat.format(orcidPattern, orcidValue) + "' " + + "&& @.rel == 'author')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/describedby/" + + publication.getID() + "' " + + "&& @.rel == 'describedby' " + + "&& @.type == '" + mimeType + "')]").exists()) + 
.andExpect(jsonPath("$[?(@.href == '" + dcIdentifierUriMetadataValue + "' " + + "&& @.rel == 'cite-as')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/bitstreams/" + bitstream.getID() + "/download' " + + "&& @.rel == 'item' " + + "&& @.type == 'text/plain')]").exists()) + .andExpect(jsonPath("$[?(@.href == 'https://schema.org/AboutPage' " + + "&& @.rel == 'type')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + + publication.getID().toString() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + url + "/" + signpostingUrl + "/linksets/" + + publication.getID().toString() + "/json' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()); + } + + @Test + public void findTypedLinkForBitstream() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + String uiUrl = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(3))) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + + "&& @.rel == 'collection' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + + "' && @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForBitstreamWithType() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + bitstreamService.addMetadata(context, bitstream, "dc", "type", null, Item.ANY, "Article"); + + context.restoreAuthSystemState(); + + String uiUrl = configurationService.getProperty("dspace.ui.url"); + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", + Matchers.hasSize(4))) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/entities/publication/" + item.getID() + "' " + + "&& 
@.rel == 'collection' " + + "&& @.type == 'text/html')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "' " + + "&& @.rel == 'linkset' " + + "&& @.type == 'application/linkset')]").exists()) + .andExpect(jsonPath("$[?(@.href == '" + uiUrl + "/signposting/linksets/" + item.getID() + "/json" + + "' && @.rel == 'linkset' " + + "&& @.type == 'application/linkset+json')]").exists()) + .andExpect(jsonPath("$[?(@.href == 'https://schema.org/ScholarlyArticle' " + + "&& @.rel == 'type')]").exists()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForRestrictedBitstream() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .withReaderGroup(internalGroup) + .build(); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isUnauthorized()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForBitstreamUnderEmbargo() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item Test") + .withIssueDate("2017-10-17") + .withMetadata("dc", "identifier", "doi", doi) + .build(); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .withEmbargoPeriod("6 months") + .build(); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isUnauthorized()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForBitstreamOfWorkspaceItem() throws Exception { + String bitstreamContent = "ThisIsSomeDummyText"; + String bitstreamMimeType = "text/plain"; + + context.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .build(); + Item item = workspaceItem.getItem(); + itemService.addMetadata(context, item, "dc", "identifier", "doi", Item.ANY, doi); + + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, workspaceItem.getItem(), is) + 
.withName("Bitstream") + .withDescription("description") + .withMimeType(bitstreamMimeType) + .build(); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/links/" + bitstream.getID())) + .andExpect(status().isUnauthorized()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void findTypedLinkForUnDiscoverableItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/links/" + item.getID())) + .andExpect(status().isUnauthorized()); + + DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } + + @Test + public void getDescribedBy() throws Exception { + context.turnOffAuthorisationSystem(); + DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + String currentDateInFormat = dateFormat.format(new Date()); + String title = "Item Test"; + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "identifier", "doi", doi) + .build(); + String responseMimeType = "application/vnd.datacite.datacite+xml"; + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isOk()) + .andExpect(content().string(Matchers.containsString(title))) + .andExpect(header().stringValues("Content-Type", responseMimeType + ";charset=UTF-8")); + } + + @Test + public void getDescribedByItemThatIsInWorkspace() throws Exception { + context.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .build(); + itemService.addMetadata(context, workspaceItem.getItem(), "dc", "identifier", "doi", Item.ANY, doi); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + workspaceItem.getItem().getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByWithdrawnItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withdrawn() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByEmbargoItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withIssueDate("2017-11-18") + .withEmbargoPeriod("2 week") + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByRestrictedItem() throws Exception { + context.turnOffAuthorisationSystem(); + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .withReaderGroup(internalGroup) + .build(); + 
context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void getDescribedByUnDiscoverableItem() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Withdrawn Item") + .withMetadata("dc", "identifier", "doi", doi) + .makeUnDiscoverable() + .build(); + context.restoreAuthSystemState(); + + getClient().perform(get("/signposting/describedby/" + item.getID())) + .andExpect(status().isUnauthorized()); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java index 6c9544d2f927..e21f395f0907 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java @@ -115,6 +115,8 @@ public void setUp() throws Exception { sortConfiguration.setSortFields(listSortField); + sortConfiguration.setDefaultSortField(defaultSort); + discoveryConfiguration.setSearchSortConfiguration(sortConfiguration); DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet(); @@ -167,6 +169,16 @@ public void testSortByScore() throws Exception { page.getOffset(), "SCORE", "ASC"); } + @Test + public void testSortByDefaultSortField() throws Exception { + page = PageRequest.of(2, 10); + restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page); + + verify(discoverQueryBuilder, times(1)) + .buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(), + page.getPageSize(), page.getOffset(), null, null); + } + @Test(expected = DSpaceBadRequestException.class) public void testCatchIllegalArgumentException() throws Exception { when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), anyList(), any(), any(), any(), diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java index 27c37f1487e4..ccb7d43a2378 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java @@ -11,7 +11,6 @@ import org.apache.commons.cli.Options; import org.dspace.app.rest.converter.ScriptConverter; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -28,10 +27,6 @@ public void setDspaceRunnableClass(final Class dspaceRunnableClass) { } - public boolean isAllowedToExecute(final Context context) { - return true; - } - public Options getOptions() { Options options = new Options(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java index a528f4351356..3e40a8559482 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java @@ -14,6 +14,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.File; +import java.util.Arrays; import java.util.LinkedList; import 
java.util.List; import java.util.concurrent.atomic.AtomicReference; @@ -29,13 +30,19 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ProcessBuilder; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.ProcessStatus; +import org.dspace.content.Site; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.eperson.EPerson; import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.service.ScriptService; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; @@ -49,6 +56,9 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest { @Autowired private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; + @Autowired + private ScriptService scriptService; + private final static String SCRIPTS_ENDPOINT = "/api/" + ScriptRest.CATEGORY + "/" + ScriptRest.PLURAL_NAME; private final static String CURATE_SCRIPT_ENDPOINT = SCRIPTS_ENDPOINT + "/curate/" + ProcessRest.PLURAL_NAME; @@ -371,6 +381,263 @@ public void curateScript_EPersonInParametersFails() throws Exception { } } + /** + * This test will create a basic structure of communities, collections and items with some local admins at each + * level and verify that the local admins can only run the curate script on their own objects + */ + @Test + public void securityCurateTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Community anotherCommunity = CommunityBuilder.createCommunity(context) + .withName("Another Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + Collection anotherCollection = CollectionBuilder.createCollection(context, anotherCommunity) + .withName("AnotherCollection") + .build(); + Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + Item anotherItem = ItemBuilder.createItem(context, anotherCollection) + .withTitle("Another Test item to curate").build(); + Site site = ContentServiceFactory.getInstance().getSiteService().findSite(context); + context.restoreAuthSystemState(); + LinkedList siteParameters = new LinkedList<>(); + siteParameters.add(new DSpaceCommandLineParameter("-i", site.getHandle())); + siteParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList comParameters = new LinkedList<>(); + comParameters.add(new DSpaceCommandLineParameter("-i", community.getHandle())); + comParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherComParameters = new LinkedList<>(); + anotherComParameters.add(new 
DSpaceCommandLineParameter("-i", anotherCommunity.getHandle())); + anotherComParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList colParameters = new LinkedList<>(); + colParameters.add(new DSpaceCommandLineParameter("-i", collection.getHandle())); + colParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherColParameters = new LinkedList<>(); + anotherColParameters.add(new DSpaceCommandLineParameter("-i", anotherCollection.getHandle())); + anotherColParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList itemParameters = new LinkedList<>(); + itemParameters.add(new DSpaceCommandLineParameter("-i", item.getHandle())); + itemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherItemParameters = new LinkedList<>(); + anotherItemParameters.add(new DSpaceCommandLineParameter("-i", anotherItem.getHandle())); + anotherItemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + + List listCurateSite = siteParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCom = comParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCom = anotherComParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCol = colParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCol = anotherColParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listItem = itemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherItem = anotherItemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + String adminToken = getAuthToken(admin.getEmail(), password); + List acceptableProcessStatuses = new LinkedList<>(); + acceptableProcessStatuses.addAll(Arrays.asList(ProcessStatus.SCHEDULED, + ProcessStatus.RUNNING, + ProcessStatus.COMPLETED)); + + AtomicReference idSiteRef = new AtomicReference<>(); + AtomicReference idComRef = new AtomicReference<>(); + AtomicReference idComColRef = new AtomicReference<>(); + AtomicReference idComItemRef = new AtomicReference<>(); + AtomicReference idColRef = new AtomicReference<>(); + AtomicReference idColItemRef = new AtomicReference<>(); + AtomicReference idItemRef = new AtomicReference<>(); + + ScriptConfiguration curateScriptConfiguration = scriptService.getScriptConfiguration("curate"); + // we should be able to start the curate script with all our admins on the respective dso + try { + // start a process as general admin 
+ getClient(adminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(admin.getID()), + siteParameters, + acceptableProcessStatuses)))) + .andDo(result -> idSiteRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // check with the com admin + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + comParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be able to run the curate also over the children collection and item + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be NOT able to run the curate over other com, col or items + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCom))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the col admin + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + colParameters, + 
acceptableProcessStatuses)))) + .andDo(result -> idColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the col admin should be able to run the curate also over the owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // the col admin should be NOT able to run the curate over the community nor another collection nor + // on a not owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the item admin + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(itemAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the item admin should be NOT able to run the curate over the community nor the collection nor + // on a not owned item + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + } finally { + ProcessBuilder.deleteProcess(idSiteRef.get()); + ProcessBuilder.deleteProcess(idComRef.get()); + 
ProcessBuilder.deleteProcess(idComColRef.get()); + ProcessBuilder.deleteProcess(idComItemRef.get()); + ProcessBuilder.deleteProcess(idColRef.get()); + ProcessBuilder.deleteProcess(idColItemRef.get()); + ProcessBuilder.deleteProcess(idItemRef.get()); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af762..632b4e2f83f4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml new file mode 100644 index 000000000000..4f921658e32b --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml @@ -0,0 +1,14 @@ + + + + 1 + 1 + 0 + 1 + MCID_64784b5ab65e3b2b2253cd3a + + 36708638 + + + "10 1016 j nepr 2023 103548"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml new file mode 100644 index 000000000000..1ff9570777a7 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml @@ -0,0 +1,14 @@ + + + + 1 + 1 + 0 + 1 + MCID_64784b12ccf058150336d6a8 + + 21975942 + + + "10 1002 0471142905 hg0610s71"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml new file mode 100644 index 000000000000..666fb1e7d550 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml @@ -0,0 +1,194 @@ + + + + + + 36708638 + + 2023 + 02 + 23 + + + 2023 + 02 + 23 + +
    + + 1873-5223 + + 67 + + 2023 + Feb + + + Nurse education in practice + Nurse Educ Pract + + Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review. + + 103548 + 103548 + + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + To report and synthesize the main strategies for teaching clinical reasoning described in the literature in the context of advanced clinical practice and promote new areas of research to improve the pedagogical approach to clinical reasoning in Advanced Practice Nursing. + Clinical reasoning and clinical thinking are essential elements in the advanced nursing clinical practice decision-making process. The quality improvement of care is related to the development of those skills. Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical reasoning in advanced clinical practice. + A scoping review was conducted using the framework developed by Arksey and O'Malley as a research strategy. Consistent with the nature of scoping reviews, a study protocol has been established. + The studies included and analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary revision studies, published in biomedical databases, were selected, including qualitative ones. Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID. Three authors independently evaluated the articles for titles, abstracts, and full text. + 1433 articles were examined, applying the eligibility and exclusion criteria 73 studies were assessed for eligibility, and 27 were included in the scoping review. The results that emerged from the review were interpreted and grouped into three macro strategies (simulations-based education, art and visual thinking, and other learning approaches) and nineteen educational interventions. + Among the different strategies, the simulations are the most used. Despite this, our scoping review reveals that is necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to demonstrate which methodology is more effective in obtaining the learning outcomes necessary to acquire an adequate level of judgment and critical thinking. Therefore, it will be necessary to relate teaching methodologies with the skills developed. + Copyright © 2023 Elsevier Ltd. All rights reserved. + + + + Giuffrida + Silvia + S + + Department of Cardiology and Cardiac Surgery, Cardio Centro Ticino Institute, Ente Ospedaliero Cantonale, Lugano, Switzerland. Electronic address: silvia.giuffrida@eoc.ch. + + + + Silano + Verdiana + V + + Nursing Direction of Settore Anziani Città di Bellinzona, Bellinzona, Switzerland. Electronic address: verdiana.silano@hotmail.it. + + + + Ramacciati + Nicola + N + + Department of Pharmacy, Health and Nutritional Sciences (DFSSN), University of Calabria, Rende, Italy. Electronic address: nicola.ramacciati@unical.it. + + + + Prandi + Cesarina + C + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: cesarina.prandi@supsi.ch. + + + + Baldon + Alessia + A + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: alessia.baldon@supsi.ch. 
+ + + + Bianchi + Monica + M + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: monica.bianchi@supsi.ch. + + + + eng + + Journal Article + Review + + + 2023 + 01 + 17 + +
    + + Scotland + Nurse Educ Pract + 101090848 + 1471-5953 + + IM + + + Humans + + + Advanced Practice Nursing + + + Learning + + + Curriculum + + + Thinking + + + Clinical Reasoning + + + Students, Nursing + + + + Advanced practice nursing + Clinical reasoning + Critical thinking + Educational strategies + Nursing education + Teaching methodology + + Declaration of Competing Interest The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper. +
    + + + + 2022 + 11 + 9 + + + 2022 + 12 + 17 + + + 2023 + 1 + 10 + + + 2023 + 1 + 29 + 6 + 0 + + + 2023 + 2 + 25 + 6 + 0 + + + 2023 + 1 + 28 + 18 + 7 + + + ppublish + + 36708638 + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + +
    +
    \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml new file mode 100644 index 000000000000..949d3b1250b2 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml @@ -0,0 +1,132 @@ + + + + + + 21975942 + + 2012 + 01 + 13 + + + 2016 + 10 + 21 + +
    + + 1934-8258 + + Chapter 6 + + 2011 + Oct + + + Current protocols in human genetics + Curr Protoc Hum Genet + + Searching NCBI Databases Using Entrez. + + Unit6.10 + Unit6.10 + + 10.1002/0471142905.hg0610s71 + + One of the most widely used interfaces for the retrieval of information from biological databases is the NCBI Entrez system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between the individual entries found in numerous public databases. The existence of such natural connections, mostly biological in nature, argued for the development of a method through which all the information about a particular biological entity could be found without having to sequentially visit and query disparate databases. Two basic protocols describe simple, text-based searches, illustrating the types of information that can be retrieved through the Entrez system. An alternate protocol builds upon the first basic protocol, using additional, built-in features of the Entrez system, and providing alternative ways to issue the initial query. The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure visualization tool, is also discussed. + © 2011 by John Wiley & Sons, Inc. + + + + Gibney + Gretchen + G + + + Baxevanis + Andreas D + AD + + + eng + + Journal Article + +
    + + United States + Curr Protoc Hum Genet + 101287858 + 1934-8258 + + IM + + + Animals + + + Database Management Systems + + + Databases, Factual + + + Humans + + + Information Storage and Retrieval + methods + + + Internet + + + Molecular Conformation + + + National Library of Medicine (U.S.) + + + PubMed + + + United States + + + User-Computer Interface + + +
    + + + + 2011 + 10 + 7 + 6 + 0 + + + 2011 + 10 + 7 + 6 + 0 + + + 2012 + 1 + 14 + 6 + 0 + + + ppublish + + 21975942 + 10.1002/0471142905.hg0610s71 + + +
    +
    \ No newline at end of file diff --git a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java index afd1627f5ee3..6cffa7ee66d5 100644 --- a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java +++ b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java @@ -7,6 +7,8 @@ */ package org.dspace.servicemanager; +import static org.apache.logging.log4j.Level.DEBUG; + import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; @@ -21,6 +23,8 @@ import javax.annotation.PreDestroy; import org.apache.commons.lang3.ArrayUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.kernel.Activator; import org.dspace.kernel.config.SpringLoader; import org.dspace.kernel.mixins.ConfigChangeListener; @@ -28,8 +32,7 @@ import org.dspace.kernel.mixins.ServiceManagerReadyAware; import org.dspace.servicemanager.config.DSpaceConfigurationService; import org.dspace.servicemanager.spring.DSpaceBeanFactoryPostProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.dspace.utils.CallStackUtils; import org.springframework.beans.BeansException; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.beans.factory.NoSuchBeanDefinitionException; @@ -44,7 +47,7 @@ */ public final class DSpaceServiceManager implements ServiceManagerSystem { - private static Logger log = LoggerFactory.getLogger(DSpaceServiceManager.class); + private static Logger log = LogManager.getLogger(); public static final String CONFIG_PATH = "spring/spring-dspace-applicationContext.xml"; public static final String CORE_RESOURCE_PATH = "classpath*:spring/spring-dspace-core-services.xml"; @@ -426,9 +429,10 @@ public T getServiceByName(String name, Class type) { service = (T) applicationContext.getBean(name, type); } catch (BeansException e) { // no luck, try the fall back option - log.warn( + log.debug( "Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", name, e); + + " Will try to look up bean by type next.", name); + CallStackUtils.logCaller(log, DEBUG); service = null; } } else { @@ -437,8 +441,9 @@ public T getServiceByName(String name, Class type) { service = (T) applicationContext.getBean(type.getName(), type); } catch (BeansException e) { // no luck, try the fall back option - log.warn("Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", type.getName(), e); + log.debug("Unable to locate bean by name or id={}." 
+ + " Will try to look up bean by type next.", type::getName); + CallStackUtils.logCaller(log, DEBUG); service = null; } } diff --git a/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java new file mode 100644 index 000000000000..cb60a223a184 --- /dev/null +++ b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java @@ -0,0 +1,44 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.utils; + +import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE; + +import java.lang.StackWalker.StackFrame; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; + +/** + * Utility methods for manipulating call stacks. + * + * @author mwood + */ +public class CallStackUtils { + private CallStackUtils() {} + + /** + * Log the class, method and line of the caller's caller. + * + * @param log logger to use. + * @param level log at this level, if enabled. + */ + static public void logCaller(Logger log, Level level) { + if (log.isEnabled(level)) { + StackWalker stack = StackWalker.getInstance(RETAIN_CLASS_REFERENCE); + StackFrame caller = stack.walk(stream -> stream.skip(2) + .findFirst() + .get()); + String callerClassName = caller.getDeclaringClass().getCanonicalName(); + String callerMethodName = caller.getMethodName(); + int callerLine = caller.getLineNumber(); + log.log(level, "Called from {}.{} line {}.", + callerClassName, callerMethodName, callerLine); + } + } +} diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index eb2ae6288a22..460c1d73c9c5 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -24,25 +24,6 @@ ${basedir}/.. - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - - diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 908252119458..ab9d6cf2f26b 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -22,38 +22,6 @@ ${basedir}/.. - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - postgres-support - - - !db.name - - - - - org.postgresql - postgresql - - - - - javax.servlet diff --git a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl index 3bc1867277ab..6fef615a4afe 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl @@ -75,6 +75,9 @@ + + @@ -666,6 +669,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1133,11 +1170,11 @@ - + + select="/doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']/text()"/> @@ -1215,7 +1252,7 @@ - + + + + + + + + + + + open access + + + embargoed access + + + restricted access + + + metadata only access + + + + + diff --git a/dspace/config/local.cfg.EXAMPLE b/dspace/config/local.cfg.EXAMPLE index 78f5bad26770..3028b3b9b24c 100644 --- a/dspace/config/local.cfg.EXAMPLE +++ b/dspace/config/local.cfg.EXAMPLE @@ -71,23 +71,15 @@ solr.server = http://localhost:8983/solr ########################## # DATABASE CONFIGURATION # ########################## -# DSpace only supports two database types: PostgreSQL or Oracle -# PostgreSQL is highly recommended. -# Oracle support is DEPRECATED. 
See https://github.com/DSpace/DSpace/issues/8214 +# DSpace ONLY supports PostgreSQL at this time. # URL for connecting to database -# * Postgres template: jdbc:postgresql://localhost:5432/dspace -# * Oracle template (DEPRECATED): jdbc:oracle:thin:@//localhost:1521/xe db.url = jdbc:postgresql://localhost:5432/dspace -# JDBC Driver -# * For Postgres: org.postgresql.Driver -# * For Oracle (DEPRECATED): oracle.jdbc.OracleDriver +# JDBC Driver for PostgreSQL db.driver = org.postgresql.Driver -# Database Dialect (for Hibernate) -# * For Postgres: org.hibernate.dialect.PostgreSQL94Dialect -# * For Oracle (DEPRECATED): org.hibernate.dialect.Oracle10gDialect +# PostgreSQL Database Dialect (for Hibernate) db.dialect = org.hibernate.dialect.PostgreSQL94Dialect # Database username and password @@ -95,9 +87,7 @@ db.username = dspace db.password = dspace # Database Schema name -# * For Postgres, this is often "public" (default schema) -# * For Oracle (DEPRECATED), schema is equivalent to the username of your database account, -# so this may be set to ${db.username} in most scenarios. +# For PostgreSQL, this is often "public" (default schema) db.schema = public ## Connection pool parameters diff --git a/dspace/config/modules/rest.cfg b/dspace/config/modules/rest.cfg index 1dff33f986b3..ea955c183663 100644 --- a/dspace/config/modules/rest.cfg +++ b/dspace/config/modules/rest.cfg @@ -25,6 +25,10 @@ rest.projections.full.max = 2 # This property determines the max embed depth for a SpecificLevelProjection rest.projection.specificLevel.maxEmbed = 5 +# This property determines the max amount of rest operations that can be performed at the same time, for example when +# batch removing bitstreams. The default value is set to 1000. +rest.patch.operations.limit = 1000 + # Define which configuration properties are exposed through the http:///api/config/properties/ # rest endpoint. If a rest request is made for a property which exists, but isn't listed here, the server will # respond that the property wasn't found. This property can be defined multiple times to allow access to multiple diff --git a/dspace/config/modules/signposting.cfg b/dspace/config/modules/signposting.cfg new file mode 100644 index 000000000000..fba80da41481 --- /dev/null +++ b/dspace/config/modules/signposting.cfg @@ -0,0 +1,35 @@ +#---------------------------------------------------------------# +#------------------SIGNPOSTING CONFIGURATIONS-------------------# + +# Allowed Cross-Origin-Resource-Sharing (CORS) origins (in "Access-Control-Allow-Origin" header). +# Only these origins (client URLs) can successfully authenticate with your REST API. +# Defaults to ${dspace.ui.url} if unspecified (as the UI must have access to the REST API). +# Multiple allowed origin URLs may be comma separated. Wildcard value (*) is NOT SUPPORTED. +# (Requires reboot of servlet container, e.g. Tomcat, to reload) +signposting.cors.allowed-origins = ${dspace.ui.url} + +# Whether or not to allow credentials (e.g. cookies) sent by the client/browser in CORS +# requests (in "Access-Control-Allow-Credentials" header). +# For DSpace, we default this to "true" to support external authentication via Shibboleth (and similar). +# However, if any of the "allowed-origins" above are *not* trusted, you may choose to set this to "false" +# for additional security. Defaults to "true" if unspecified. +# (Requires reboot of servlet container, e.g. 
Tomcat, to reload) +signposting.cors.allow-credentials = true + +# Path where signposting controller is available +# Defaults to "signposting", which means the signposting controller would be available +# at ${dspace.server.url}/signposting +# (Requires reboot of servlet container, e.g. Tomcat, to reload) +signposting.path = signposting + +# Whether or not to enable the signposting controller +# When "true", the signposting controller is accessible on ${signposting.path} +# When "false" or commented out, signposting is disabled/inaccessible. +# (Requires reboot of servlet container, e.g. Tomcat, to reload) +signposting.enabled = true + +# Name of crosswalk to use for handling of 'describedby' links. +signposting.describedby.crosswalk-name = DataCite + +# Mime-type of response of handling of 'describedby' links. +signposting.describedby.mime-type = application/vnd.datacite.datacite+xml \ No newline at end of file diff --git a/dspace/config/registries/bitstream-formats.xml b/dspace/config/registries/bitstream-formats.xml index e7badbcdecd0..3515773fd742 100644 --- a/dspace/config/registries/bitstream-formats.xml +++ b/dspace/config/registries/bitstream-formats.xml @@ -115,6 +115,15 @@ csv + + text/vtt + WebVTT + Web Video Text Tracks Format + 1 + false + vtt + + application/msword Microsoft Word @@ -201,7 +210,7 @@ image/png - image/png + PNG Portable Network Graphics 1 false @@ -218,6 +227,15 @@ tif + + image/jp2 + JPEG2000 + JPEG 2000 Image File Format + 1 + false + jp2 + + audio/x-aiff AIFF @@ -791,4 +809,22 @@ mp3 + + image/webp + WebP + WebP is a modern image format that provides superior lossless and lossy compression for images on the web. + 1 + false + webp + + + + image/avif + AVIF + AV1 Image File Format (AVIF) is an open, royalty-free image file format specification for storing images or image sequences compressed with AV1 in the HEIF container format. + 1 + false + avif + + diff --git a/dspace/config/spring/api/access-conditions.xml b/dspace/config/spring/api/access-conditions.xml index 828b31d425df..fcd0b54c7236 100644 --- a/dspace/config/spring/api/access-conditions.xml +++ b/dspace/config/spring/api/access-conditions.xml @@ -75,4 +75,34 @@
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace/config/spring/api/arxiv-integration.xml b/dspace/config/spring/api/arxiv-integration.xml index 115f13152a21..1ec1fd503310 100644 --- a/dspace/config/spring/api/arxiv-integration.xml +++ b/dspace/config/spring/api/arxiv-integration.xml @@ -56,10 +56,12 @@ - + + + diff --git a/dspace/config/spring/api/core-services.xml b/dspace/config/spring/api/core-services.xml index f675c7bfc480..cb0a64a7df7c 100644 --- a/dspace/config/spring/api/core-services.xml +++ b/dspace/config/spring/api/core-services.xml @@ -109,9 +109,9 @@ - + - + @@ -136,6 +136,8 @@ + + diff --git a/dspace/config/spring/api/crossref-integration.xml b/dspace/config/spring/api/crossref-integration.xml index 35712a2983fb..f1363fae322e 100644 --- a/dspace/config/spring/api/crossref-integration.xml +++ b/dspace/config/spring/api/crossref-integration.xml @@ -30,6 +30,7 @@ + @@ -69,8 +70,11 @@ - + + + + @@ -134,6 +138,14 @@ + + + + + + + + diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index ed9e94021895..c36b10d23a0a 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -60,7 +60,7 @@ dc.contributor.author dc.contributor.editor - + @@ -98,9 +98,10 @@ + - + @@ -996,7 +997,7 @@ - + @@ -3329,7 +3330,7 @@ - + diff --git a/dspace/config/spring/api/identifier-service.xml b/dspace/config/spring/api/identifier-service.xml index 39754edce46b..90c2b13b4fd1 100644 --- a/dspace/config/spring/api/identifier-service.xml +++ b/dspace/config/spring/api/identifier-service.xml @@ -20,11 +20,9 @@ The VersionedHandleIdentifierProvider creates a new versioned handle for every new version. --> - + - - + + + This sends various emails between the requestor and the grantor. + + + + - + id="org.dspace.app.requestitem.RequestItemMetadataStrategy"> + + Get recipients from an item metadata field. + - + id="org.dspace.app.requestitem.RequestItemHelpdeskStrategy"> + + HelpDesk to instead get RequestItem emails + + + id='org.dspace.app.requestitem.CollectionAdministratorsRequestItemStrategy'> - Send request emails to administrators of an Item's owning - Collection. + Send request emails to administrators of an Item's owning + Collection. + id='org.dspace.app.requestitem.CombiningRequestItemStrategy'> - Execute multiple strategies and concatenate their lists of - recipients. Mail will go to all members of the combined list. + Execute multiple strategies and concatenate their lists of + recipients. Mail will go to all members of the combined list. 
- A list of RequestItemAuthorExtractor beans + A list of RequestItemAuthorExtractor beans + + + + + diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 39b9f1e7151b..4eb482d9e93a 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -61,22 +61,6 @@ - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - unit-test-environment @@ -280,7 +264,7 @@ jar - + diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index 05c3a4f5098e..61c1fb16d774 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -90,24 +90,6 @@ - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index 470681a555d1..9cddbeb2a309 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -244,22 +244,6 @@ just adding new jar in the classloader - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - diff --git a/dspace/solr/search/conf/schema.xml b/dspace/solr/search/conf/schema.xml index 5f0149b8bd47..b9c78f1fd847 100644 --- a/dspace/solr/search/conf/schema.xml +++ b/dspace/solr/search/conf/schema.xml @@ -299,7 +299,7 @@ - + @@ -375,13 +375,13 @@ - + - + - + search.uniqueid diff --git a/dspace/src/main/docker-compose/README.md b/dspace/src/main/docker-compose/README.md index a83a466bdbba..35a6e6055433 100644 --- a/dspace/src/main/docker-compose/README.md +++ b/dspace/src/main/docker-compose/README.md @@ -1,4 +1,4 @@ -# Docker Compose Resources +# Docker Compose files for DSpace Backend *** :warning: **THESE IMAGES ARE NOT PRODUCTION READY** The below Docker Compose images/resources were built for development/testing only. Therefore, they may not be fully secured or up-to-date, and should not be used in production. @@ -6,27 +6,51 @@ If you wish to run DSpace on Docker in production, we recommend building your own Docker images. You are welcome to borrow ideas/concepts from the below images in doing so. But, the below images should not be used "as is" in any production scenario. *** -## root directory Resources + +## Overview +The scripts in this directory can be used to start the DSpace REST API (backend) in Docker. +Optionally, the DSpace User Interface (frontend) may also be started in Docker. + +For additional options/settings in starting the User Interface (frontend) in Docker, see the Docker Compose +documentation for the frontend: https://github.com/DSpace/dspace-angular/blob/main/docker/README.md + +## Primary Docker Compose Scripts (in root directory) +The root directory of this project contains the primary Dockerfiles & Docker Compose scripts +which are used to start the backend. + - docker-compose.yml - - Docker compose file to orchestrate DSpace 7 REST components -- docker-compose-cli - - Docker compose file to run DSpace CLI tasks within a running DSpace instance in Docker + - Docker compose file to orchestrate DSpace REST API (backend) components. + - Uses the `Dockerfile` in the same directory. +- docker-compose-cli.yml + - Docker compose file to run DSpace CLI (Command Line Interface) tasks within a running DSpace instance in Docker. See instructions below. + - Uses the `Dockerfile.cli` in the same directory. 
-## dspace/src/main/docker-compose resources +Documentation for all Dockerfiles used by these compose scripts can be found in the ["docker" folder README](../docker/README.md) + +## Additional Docker Compose tools (in ./dspace/src/main/docker-compose) - cli.assetstore.yml - Docker compose file that will download and install a default assetstore. + - The default assetstore is the configurable entities test dataset. Useful for [testing/demos of Entities](#Ingest Option 2 Ingest Entities Test Data). - cli.ingest.yml - - Docker compose file that will run an AIP ingest into DSpace 7. + - Docker compose file that will run an AIP ingest into DSpace 7. Useful for testing/demos with basic Items. - db.entities.yml - - Docker compose file that pre-populate a database instance using a SQL dump. The default dataset is the configurable entities test dataset. -- local.cfg - - Sets the environment used across containers run with docker-compose + - Docker compose file that pre-populate a database instance using a downloaded SQL dump. + - The default dataset is the configurable entities test dataset. Useful for [testing/demos of Entities](#Ingest Option 2 Ingest Entities Test Data). +- db.restore.yml + - Docker compose file that pre-populate a database instance using a *local* SQL dump (hardcoded to `./pgdump.sql`) + - Useful for restoring data from a local backup, or [Upgrading PostgreSQL in Docker](#Upgrading PostgreSQL in Docker) - docker-compose-angular.yml - - Docker compose file that will start a published DSpace angular container that interacts with the branch. + - Docker compose file that will start a published DSpace User Interface container that interacts with the branch. - docker-compose-shibboleth.yml - Docker compose file that will start a *test/demo* Shibboleth SP container (in Apache) that proxies requests to the DSpace container - ONLY useful for testing/development. NOT production ready. +- docker-compose-iiif.yml + - Docker compose file that will start a *test/demo* Cantaloupe image server container required for enabling IIIF support. + - ONLY useful for testing/development. NOT production ready. + +Documentation for all Dockerfiles used by these compose scripts can be found in the ["docker" folder README](../docker/README.md) + ## To refresh / pull DSpace images from Dockerhub ``` @@ -55,6 +79,12 @@ docker-compose -p d7 up -d docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/docker-compose-angular.yml up -d ``` +## Run DSpace REST and DSpace Angular from local branches + +*Allows you to run the backend from the "DSpace/DSpace" codebase while also running the frontend from the "DSpace/dspace-angular" codebase.* + +See documentation in [DSpace User Interface Docker instructions](https://github.com/DSpace/dspace-angular/blob/main/docker/README.md#run-dspace-rest-and-dspace-angular-from-local-branches). + ## Run DSpace 7 REST with a IIIF Image Server from your branch *Only useful for testing IIIF support in a development environment* @@ -67,7 +97,6 @@ docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/doc ``` ## Run DSpace 7 REST and Shibboleth SP (in Apache) from your branch - *Only useful for testing Shibboleth in a development environment* This Shibboleth container uses https://samltest.id/ as an IdP (see `../docker/dspace-shibboleth/`). 
@@ -143,21 +172,11 @@ The remainder of these instructions assume you are using ngrok (though other pro DSPACE_HOSTNAME=[subdomain].ngrok.io docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/docker-compose-angular.yml -f dspace/src/main/docker-compose/docker-compose-shibboleth.yml up -d ``` -## Run DSpace 7 REST and Angular from local branches +## Sample Test Data -_The system will be started in 2 steps. Each step shares the same docker network._ +### Ingesting test content from AIP files -From DSpace/DSpace -``` -docker-compose -p d7 up -d -``` - -From DSpace/DSpace-angular (build as needed) -``` -docker-compose -p d7 -f docker/docker-compose.yml up -d -``` - -## Ingest Option 1: Ingesting test content from AIP files into a running DSpace 7 instance +*Allows you to ingest a set of AIPs into your DSpace instance for testing/demo purposes.* These AIPs represent basic Communities, Collections and Items. Prerequisites - Start DSpace 7 using one of the options listed above @@ -173,8 +192,14 @@ Download a Zip file of AIP content and ingest test data docker-compose -p d7 -f docker-compose-cli.yml -f dspace/src/main/docker-compose/cli.ingest.yml run --rm dspace-cli ``` -## Ingest Option 2: Ingest Entities Test Data -_Remove your d7 volumes if you already ingested content into your docker volumes_ +### Ingest Entities Test Data + +*Allows you to load Configurable Entities test data for testing/demo purposes.* + +Prerequisites +- Start DSpace 7 using one of the options listed above +- Build the DSpace CLI image if needed. See the instructions above. +- _Remove your d7 volumes if you already ingested content into your docker volumes_ Start DSpace REST with a postgres database dump downloaded from the internet. ``` @@ -212,3 +237,85 @@ Similarly, you can see the value of any DSpace configuration (in local.cfg or ds # Output the value of `dspace.ui.url` from running Docker instance docker-compose -p d7 -f docker-compose-cli.yml run --rm dspace-cli dsprop -p dspace.ui.url ``` + +NOTE: It is also possible to run CLI scripts directly on the "dspace" container (where the backend runs) +This can be useful if you want to pass environment variables which override DSpace configs. +``` +# Run the "./dspace database clean" command from the "dspace" container +# Before doing so, it sets "db.cleanDisabled=false". +# WARNING: This will delete all your data. It's just an example of how to do so. +docker-compose -p d7 exec -e "db__P__cleanDisabled=false" dspace /dspace/bin/dspace database clean +``` + +## Upgrading PostgreSQL in Docker + +Occasionally, we update our `dspace-postgres-*` images to use a new version of PostgreSQL. +Simply using the new image will likely throw errors as the pgdata (postgres data) directory is incompatible +with the new version of PostgreSQL. These errors look like: +``` +FATAL: database files are incompatible with server +DETAIL: The data directory was initialized by PostgreSQL version 11, which is not compatible with this version 13.10 +``` + +Here's how to fix those issues by migrating your old Postgres data to the new version of Postgres + +1. 
First, you must start up the older PostgreSQL image (to dump your existing data to a `*.sql` file) + ``` + # This command assumes you are using the process described above to start all your containers + docker-compose -p d7 up -d + ``` + * If you've already accidentally updated to the new PostgreSQL image, you have a few options: + * Pull down an older version of the image from Dockerhub (using a tag) + * Or, temporarily rebuild your local image with the old version of Postgres. For example: + ``` + # This command will rebuild using PostgreSQL v11 & tag it locally as "dspace-7_x" + docker build --build-arg POSTGRES_VERSION=11 -t dspace/dspace-postgres-pgcrypto:dspace-7_x ./dspace/src/main/docker/dspace-postgres-pgcrypto/ + # Then restart container with that image + docker-compose -p d7 up -d + ``` +2. Dump your entire "dspace" database out of the old "dspacedb" container to a local file named `pgdump.sql` + ``` + # NOTE: WE HIGHLY RECOMMEND LOGGING INTO THE CONTAINER and doing the pg_dump within the container. + # If you attempt to run pg_dump from your local machine via docker "exec" (or similar), sometimes + # UTF-8 characters can be corrupted in the export file. This may result in data loss. + + # First login to the "dspacedb" container + docker exec -it dspacedb /bin/bash + + # Dump the "dspace" database to a file named "/tmp/pgdump.sql" within the container + pg_dump -U dspace dspace > /tmp/pgdump.sql + + # Exit the container + exit + + # Download (copy) that /tmp/pgdump.sql backup file from container to your local machine + docker cp dspacedb:/tmp/pgdump.sql . + ``` +3. Now, stop all existing containers. This shuts down the old version of PostgreSQL + ``` + # This command assumes you are using the process described above to start/stop all your containers + docker-compose -p d7 down + ``` +4. Delete the `pgdata` volume. WARNING: This deletes all your old PostgreSQL data. Make sure you have that `pgdump.sql` file FIRST! + ``` + # Assumes you are using `-p d7` which prefixes all volumes with `d7_` + docker volume rm d7_pgdata + ``` +5. Now, pull down the latest PostgreSQL image with the NEW version of PostgreSQL. + ``` + docker-compose -f docker-compose.yml -f docker-compose-cli.yml pull + ``` +6. Start everything up using our `db.restore.yml` script. This script will recreate the database +using the local `./pgdump.sql` file. IMPORTANT: If you renamed that "pgdump.sql" file or stored it elsewhere, +then you MUST change the name/directory in the `db.restore.yml` script. + ``` + # Restore database from "./pgdump.sql" (this path is hardcoded in db.restore.yml) + docker-compose -p d7 -f docker-compose.yml -f dspace/src/main/docker-compose/db.restore.yml up -d + ``` +7. Finally, reindex all database contents into Solr (just to be sure Solr indexes are current). 
+ ``` + # Run "./dspace index-discovery -b" using our CLI image + docker-compose -p d7 -f docker-compose-cli.yml run --rm dspace-cli index-discovery -b + ``` +At this point in time, all your old database data should be migrated to the new Postgres +and running at http://localhost:8080/server/ \ No newline at end of file diff --git a/dspace/src/main/docker-compose/db.entities.yml b/dspace/src/main/docker-compose/db.entities.yml index 8d86f7bb8359..32c54a5d0bd1 100644 --- a/dspace/src/main/docker-compose/db.entities.yml +++ b/dspace/src/main/docker-compose/db.entities.yml @@ -10,7 +10,7 @@ version: "3.7" services: dspacedb: - image: dspace/dspace-postgres-pgcrypto:loadsql + image: dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql environment: # This SQL is available from https://github.com/DSpace-Labs/AIP-Files/releases/tag/demo-entities-data - LOADSQL=https://github.com/DSpace-Labs/AIP-Files/releases/download/demo-entities-data/dspace7-entities-data.sql diff --git a/dspace/src/main/docker-compose/db.restore.yml b/dspace/src/main/docker-compose/db.restore.yml new file mode 100644 index 000000000000..fc2f30b9d8e0 --- /dev/null +++ b/dspace/src/main/docker-compose/db.restore.yml @@ -0,0 +1,26 @@ +# +# The contents of this file are subject to the license and copyright +# detailed in the LICENSE and NOTICE files at the root of the source +# tree and available online at +# +# http://www.dspace.org/license/ +# + +version: "3.7" + +# +# Overrides the default "dspacedb" container behavior to load a local SQL file into PostgreSQL. +# +# This can be used to restore a "dspacedb" container from a pg_dump, or during upgrade to a new version of PostgreSQL. +services: + dspacedb: + image: dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql + environment: + # Location where the dump SQL file will be available on the running container + - LOCALSQL=/tmp/pgdump.sql + volumes: + # Volume which shares a local SQL file at "./pgdump.sql" to the running container + # IF YOUR LOCAL FILE HAS A DIFFERENT NAME (or is in a different location), then change the "./pgdump.sql" + # portion of this line. + - ./pgdump.sql:/tmp/pgdump.sql + diff --git a/dspace/src/main/docker/README.md b/dspace/src/main/docker/README.md index 6c9da0190cd2..ac1b4cb9236b 100644 --- a/dspace/src/main/docker/README.md +++ b/dspace/src/main/docker/README.md @@ -1,4 +1,4 @@ -# Docker images supporting DSpace +# Docker images supporting DSpace Backend *** :warning: **THESE IMAGES ARE NOT PRODUCTION READY** The below Docker Compose images/resources were built for development/testing only. Therefore, they may not be fully secured or up-to-date, and should not be used in production. @@ -6,9 +6,15 @@ If you wish to run DSpace on Docker in production, we recommend building your own Docker images. You are welcome to borrow ideas/concepts from the below images in doing so. But, the below images should not be used "as is" in any production scenario. *** -## Dockerfile.dependencies +## Overview +The Dockerfiles in this directory (and subdirectories) are used by our [Docker Compose scripts](../docker-compose/README.md). + +## Dockerfile.dependencies (in root folder) This Dockerfile is used to pre-cache Maven dependency downloads that will be used in subsequent DSpace docker builds. +Caching these Maven dependencies provides a speed increase to all later builds by ensuring the dependencies +are only downloaded once. + ``` docker build -t dspace/dspace-dependencies:dspace-7_x -f Dockerfile.dependencies . 
``` @@ -22,12 +28,13 @@ Admins to our DockerHub repo can manually publish with the following command. docker push dspace/dspace-dependencies:dspace-7_x ``` -## Dockerfile.test +## Dockerfile.test (in root folder) -This Dockerfile builds a DSpace 7 Tomcat image (for testing/development). -This image deploys two DSpace webapps: +This Dockerfile builds a DSpace 7 backend image (for testing/development). +This image deploys two DSpace webapps to Tomcat running in Docker: 1. The DSpace 7 REST API (at `http://localhost:8080/server`) -2. The legacy (v6) REST API (at `http://localhost:8080//rest`), deployed without requiring HTTPS access. +2. The legacy (v6) REST API (at `http://localhost:8080/rest`), deployed without requiring HTTPS access. +This image also sets up debugging in Tomcat for development. ``` docker build -t dspace/dspace:dspace-7_x-test -f Dockerfile.test . @@ -42,12 +49,12 @@ Admins to our DockerHub repo can manually publish with the following command. docker push dspace/dspace:dspace-7_x-test ``` -## Dockerfile +## Dockerfile (in root folder) -This Dockerfile builds a DSpace 7 tomcat image. -This image deploys two DSpace webapps: +This Dockerfile builds a DSpace 7 backend image. +This image deploys one DSpace webapp to Tomcat running in Docker: 1. The DSpace 7 REST API (at `http://localhost:8080/server`) -2. The legacy (v6) REST API (at `http://localhost:8080//rest`), deployed *requiring* HTTPS access. + ``` docker build -t dspace/dspace:dspace-7_x -f Dockerfile . ``` @@ -61,9 +68,9 @@ Admins to our DockerHub repo can publish with the following command. docker push dspace/dspace:dspace-7_x ``` -## Dockefile.cli +## Dockerfile.cli (in root folder) -This Dockerfile builds a DSpace 7 CLI image, which can be used to run commandline tools via Docker. +This Dockerfile builds a DSpace 7 CLI (command line interface) image, which can be used to run DSpace's commandline tools via Docker. ``` docker build -t dspace/dspace-cli:dspace-7_x -f Dockerfile.cli . ``` @@ -77,46 +84,60 @@ Admins to our DockerHub repo can publish with the following command. docker push dspace/dspace-cli:dspace-7_x ``` -## dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile +## ./dspace-postgres-pgcrypto/Dockerfile This is a PostgreSQL Docker image containing the `pgcrypto` extension required by DSpace 6+. +This image is built *automatically* after each commit is made to the `main` branch. + +How to build manually: ``` cd dspace/src/main/docker/dspace-postgres-pgcrypto -docker build -t dspace/dspace-postgres-pgcrypto . +docker build -t dspace/dspace-postgres-pgcrypto:dspace-7_x . ``` -**This image is built manually.** It should be rebuilt as needed. +It is also possible to change the version of PostgreSQL or the PostgreSQL user's password during the build: +``` +cd dspace/src/main/docker/dspace-postgres-pgcrypto +docker build -t dspace/dspace-postgres-pgcrypto:dspace-7_x --build-arg POSTGRES_VERSION=11 --build-arg POSTGRES_PASSWORD=mypass . +``` A copy of this file exists in the DSpace 6 branch. A specialized version of this file exists for DSpace 4 in DSpace-Docker-Images. -Admins to our DockerHub repo can publish with the following command. +Admins to our DockerHub repo can (manually) publish with the following command. 
``` -docker push dspace/dspace-postgres-pgcrypto +docker push dspace/dspace-postgres-pgcrypto:dspace-7_x ``` -## dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile +## ./dspace-postgres-pgcrypto-curl/Dockerfile This is a PostgreSQL Docker image containing the `pgcrypto` extension required by DSpace 6+. This image also contains `curl`. The image is pre-configured to load a Postgres database dump on initialization. + +This image is built *automatically* after each commit is made to the `main` branch. + +How to build manually: ``` cd dspace/src/main/docker/dspace-postgres-pgcrypto-curl -docker build -t dspace/dspace-postgres-pgcrypto:loadsql . +docker build -t dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql . ``` -**This image is built manually.** It should be rebuilt as needed. +Similar to `dspace-postgres-pgcrypto` above, you can also modify the version of PostgreSQL or the PostgreSQL user's password. +See examples above. A copy of this file exists in the DSpace 6 branch. -Admins to our DockerHub repo can publish with the following command. +Admins to our DockerHub repo can (manually) publish with the following command. ``` -docker push dspace/dspace-postgres-pgcrypto:loadsql +docker push dspace/dspace-postgres-pgcrypto:dspace-7_x-loadsql ``` -## dspace/src/main/docker/dspace-shibboleth/Dockerfile +## ./dspace-shibboleth/Dockerfile This is a test / demo image which provides an Apache HTTPD proxy (in front of Tomcat) -with mod_shib & Shibboleth installed. It is primarily for usage for -testing DSpace's Shibboleth integration. It uses https://samltest.id/ as the Shibboleth IDP +with `mod_shib` & Shibboleth installed based on the +[DSpace Shibboleth configuration instructions](https://wiki.lyrasis.org/display/DSDOC7x/Authentication+Plugins#AuthenticationPlugins-ShibbolethAuthentication). +It is primarily for usage for testing DSpace's Shibboleth integration. +It uses https://samltest.id/ as the Shibboleth IDP **This image is built manually.** It should be rebuilt as needed. @@ -130,10 +151,28 @@ docker run -i -t -d -p 80:80 -p 443:443 dspace/dspace-shibboleth This image can also be rebuilt using the `../docker-compose/docker-compose-shibboleth.yml` script. +## ./dspace-solr/Dockerfile + +This Dockerfile builds a Solr image with DSpace Solr configsets included. It +can be pulled / built following the [docker compose resources](../docker-compose/README.md) +documentation. Or, to just build and/or run Solr: + +```bash +docker-compose build dspacesolr +docker-compose -p d7 up -d dspacesolr +``` + +If you're making iterative changes to the DSpace Solr configsets you'll need to rebuild / +restart the `dspacesolr` container for the changes to be deployed. From DSpace root: + +```bash +docker-compose -p d7 up --detach --build dspacesolr +``` -## test/ folder +## ./test/ folder These resources are bundled into the `dspace/dspace:dspace-*-test` image at build time. +See the `Dockerfile.test` section above for more information about the test image. 
## Debugging Docker builds diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile index 0e85dd33ce59..b2131a740262 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile @@ -6,14 +6,21 @@ # http://www.dspace.org/license/ # -# This will be deployed as dspace/dspace-postgres-pgcrpyto:loadsql -FROM postgres:11 +# To build for example use: +# docker build --build-arg POSTGRES_VERSION=13 --build-arg POSTGRES_PASSWORD=mypass ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/ +# This will be published as dspace/dspace-postgres-pgcrypto:$DSPACE_VERSION-loadsql + +ARG POSTGRES_VERSION=13 +ARG POSTGRES_PASSWORD=dspace + +FROM postgres:${POSTGRES_VERSION} ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace -ENV POSTGRES_PASSWORD dspace +ENV POSTGRES_PASSWORD ${POSTGRES_PASSWORD} -# Load a SQL dump. Set LOADSQL to a URL for the sql dump file. -RUN apt-get update && apt-get install -y curl +# Install curl which is necessary to load SQL file +RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/* +# Load a SQL dump. Set LOADSQL to a URL for the sql dump file. COPY install-pgcrypto.sh /docker-entrypoint-initdb.d/ diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh index 054d3dede5dc..3f8e95e1044f 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/install-pgcrypto.sh @@ -11,15 +11,33 @@ set -e CHECKFILE=/pgdata/ingest.hasrun.flag +# If $LOADSQL environment variable set, use 'curl' to download that SQL and run it in PostgreSQL +# This can be used to initialize a database based on test data available on the web. if [ ! -f $CHECKFILE -a ! -z ${LOADSQL} ] then - curl ${LOADSQL} -L -s --output /tmp/dspace.sql - psql -U $POSTGRES_USER < /tmp/dspace.sql + # Download SQL file to /tmp/dspace-db-init.sql + curl ${LOADSQL} -L -s --output /tmp/dspace-db-init.sql + # Load into PostgreSQL + psql -U $POSTGRES_USER < /tmp/dspace-db-init.sql + # Remove downloaded file + rm /tmp/dspace-db-init.sql touch $CHECKFILE exit fi +# If $LOCALSQL environment variable set, then simply run it in PostgreSQL +# This can be used to restore data from a pg_dump or similar. +if [ ! -f $CHECKFILE -a ! 
-z ${LOCALSQL} ] +then + # Load into PostgreSQL + psql -U $POSTGRES_USER < ${LOCALSQL} + + touch $CHECKFILE + exit +fi + +# Then, setup pgcrypto on this database psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL -- Create a new schema in this database named "extensions" (or whatever you want to name it) CREATE SCHEMA extensions; diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile index 84b7569a2b2c..7dde1a6bfd1c 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile @@ -6,13 +6,18 @@ # http://www.dspace.org/license/ # -# This will be deployed as dspace/dspace-postgres-pgcrpyto:latest -FROM postgres:11 +# To build for example use: +# docker build --build-arg POSTGRES_VERSION=13 --build-arg POSTGRES_PASSWORD=mypass ./dspace/src/main/docker/dspace-postgres-pgcrypto/ +# This will be published as dspace/dspace-postgres-pgcrypto:$DSPACE_VERSION + +ARG POSTGRES_VERSION=13 +ARG POSTGRES_PASSWORD=dspace + +FROM postgres:${POSTGRES_VERSION} ENV POSTGRES_DB dspace ENV POSTGRES_USER dspace -ENV POSTGRES_PASSWORD dspace - -RUN apt-get update +ENV POSTGRES_PASSWORD ${POSTGRES_PASSWORD} +# Copy over script which will initialize database and install pgcrypto extension COPY install-pgcrypto.sh /docker-entrypoint-initdb.d/ diff --git a/dspace/src/main/docker/dspace-solr/Dockerfile b/dspace/src/main/docker/dspace-solr/Dockerfile new file mode 100644 index 000000000000..9fe9adf9440f --- /dev/null +++ b/dspace/src/main/docker/dspace-solr/Dockerfile @@ -0,0 +1,36 @@ +# +# The contents of this file are subject to the license and copyright +# detailed in the LICENSE and NOTICE files at the root of the source +# tree and available online at +# +# http://www.dspace.org/license/ +# + +# To build use root as context for (easier) access to solr cfgs +# docker build --build-arg SOLR_VERSION=8.11 -f ./dspace/src/main/docker/dspace-solr/Dockerfile . 
+# This will be published as dspace/dspace-solr:$DSPACE_VERSION + +ARG SOLR_VERSION=8.11 + +FROM solr:${SOLR_VERSION}-slim + +ENV AUTHORITY_CONFIGSET_PATH=/opt/solr/server/solr/configsets/authority/conf \ + OAI_CONFIGSET_PATH=/opt/solr/server/solr/configsets/oai/conf \ + SEARCH_CONFIGSET_PATH=/opt/solr/server/solr/configsets/search/conf \ + STATISTICS_CONFIGSET_PATH=/opt/solr/server/solr/configsets/statistics/conf + +USER root + +RUN mkdir -p $AUTHORITY_CONFIGSET_PATH && \ + mkdir -p $OAI_CONFIGSET_PATH && \ + mkdir -p $SEARCH_CONFIGSET_PATH && \ + mkdir -p $STATISTICS_CONFIGSET_PATH + +COPY dspace/solr/authority/conf/* $AUTHORITY_CONFIGSET_PATH/ +COPY dspace/solr/oai/conf/* $OAI_CONFIGSET_PATH/ +COPY dspace/solr/search/conf/* $SEARCH_CONFIGSET_PATH/ +COPY dspace/solr/statistics/conf/* $STATISTICS_CONFIGSET_PATH/ + +RUN chown -R solr:solr /opt/solr/server/solr/configsets + +USER solr diff --git a/pom.xml b/pom.xml index 9f07e8d733d9..e1b13c4c6aaa 100644 --- a/pom.xml +++ b/pom.xml @@ -19,13 +19,13 @@ 11 - 5.3.20 - 2.6.8 - 5.6.5 - 5.6.5.Final - 6.0.23.Final - 42.4.3 - 8.11.1 + 5.3.27 + 2.7.12 + 5.7.8 + 5.6.15.Final + 6.2.5.Final + 42.6.0 + 8.11.2 3.4.0 2.10.0 @@ -37,12 +37,12 @@ 2.3.1 1.1.0 - 9.4.48.v20220622 - 2.17.1 - 2.0.27 - 1.18.0 - 1.7.25 - 2.3.0 + 9.4.51.v20230217 + 2.20.0 + 2.0.28 + 1.19.0 + 1.7.36 + 2.5.0 1.70 @@ -938,7 +938,7 @@ - + @@ -1576,7 +1576,7 @@ commons-fileupload commons-fileupload - 1.3.3 + 1.5 commons-io @@ -1709,11 +1709,6 @@ icu4j 62.1 - - com.oracle - ojdbc6 - 11.2.0.4.0 - org.dspace @@ -1869,7 +1864,7 @@ com.google.guava guava - 31.0.1-jre + 32.0.0-jre From 8aad3654969562fc0bae19408339bafe5517ff47 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Thu, 14 Sep 2023 16:08:25 -0500 Subject: [PATCH 479/686] Add basic pagination to /groups/[uuid]/epersons endpoint (cherry picked from commit 74c72354b405ed266b65cdd50b594d25bea0e87f) --- .../dspace/eperson/EPersonServiceImpl.java | 17 +++++++++- .../org/dspace/eperson/dao/EPersonDAO.java | 24 +++++++++++++- .../eperson/dao/impl/EPersonDAOImpl.java | 21 ++++++++++-- .../eperson/service/EPersonService.java | 33 +++++++++++++++++-- .../GroupEPersonLinkRepository.java | 13 +++++++- 5 files changed, 101 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index 2d0574a6301d..5f17051dbbb0 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -567,14 +567,29 @@ public List getDeleteConstraints(Context context, EPerson ePerson) throw @Override public List findByGroups(Context c, Set groups) throws SQLException { + return findByGroups(c, groups, -1, -1); + } + + @Override + public List findByGroups(Context c, Set groups, int pageSize, int offset) throws SQLException { //Make sure we at least have one group, if not don't even bother searching. if (CollectionUtils.isNotEmpty(groups)) { - return ePersonDAO.findByGroups(c, groups); + return ePersonDAO.findByGroups(c, groups, pageSize, offset); } else { return new ArrayList<>(); } } + @Override + public int countByGroups(Context c, Set groups) throws SQLException { + //Make sure we at least have one group, if not don't even bother counting. 
+ if (CollectionUtils.isNotEmpty(groups)) { + return ePersonDAO.countByGroups(c, groups); + } else { + return 0; + } + } + @Override public List findEPeopleWithSubscription(Context context) throws SQLException { return ePersonDAO.findAllSubscribers(context); diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java index 51ab89ef7e8f..573103f86ad3 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java @@ -38,7 +38,29 @@ public List search(Context context, String query, List q public int searchResultCount(Context context, String query, List queryFields) throws SQLException; - public List findByGroups(Context context, Set groups) throws SQLException; + /** + * Find all EPersons who are a member of one or more of the listed groups in a paginated fashion. Order is + * indeterminate. + * + * @param context current Context + * @param groups Set of group(s) to check membership in + * @param pageSize number of EPerson objects to load at one time. Set to <=0 to disable pagination + * @param offset number of page to load (starting with 1). Set to <=0 to disable pagination + * @return List of all EPersons who are a member of one or more groups. + * @throws SQLException + */ + List findByGroups(Context context, Set groups, int pageSize, int offset) throws SQLException; + + /** + * Count total number of EPersons who are a member of one or more of the listed groups. This provides the total + * number of results to expect from corresponding findByGroups() for pagination purposes. + * + * @param context current Context + * @param groups Set of group(s) to check membership in + * @return total number of (unique) EPersons who are a member of one or more groups. 
+ * @throws SQLException + */ + int countByGroups(Context context, Set groups) throws SQLException; public List findWithPasswordWithoutDigestAlgorithm(Context context) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java index 50547a500745..14b44d77c0cc 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java @@ -112,7 +112,7 @@ public List findAll(Context context, MetadataField metadataSortField, S } @Override - public List findByGroups(Context context, Set groups) throws SQLException { + public List findByGroups(Context context, Set groups, int pageSize, int offset) throws SQLException { Query query = createQuery(context, "SELECT DISTINCT e FROM EPerson e " + "JOIN e.groups g " + @@ -125,7 +125,24 @@ public List findByGroups(Context context, Set groups) throws SQL query.setParameter("idList", idList); - return list(query); + return list(query, pageSize, offset); + } + + @Override + public int countByGroups(Context context, Set groups) throws SQLException { + Query query = createQuery(context, + "SELECT count(DISTINCT e) FROM EPerson e " + + "JOIN e.groups g " + + "WHERE g.id IN (:idList) "); + + List idList = new ArrayList<>(groups.size()); + for (Group group : groups) { + idList.add(group.getID()); + } + + query.setParameter("idList", idList); + + return count(query); } @Override diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java index 47be942e97e9..b60247ef5498 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java @@ -252,14 +252,43 @@ public EPerson create(Context context) throws SQLException, public List getDeleteConstraints(Context context, EPerson ePerson) throws SQLException; /** - * Retrieve all accounts which belong to at least one of the specified groups. + * Retrieve all EPerson accounts which belong to at least one of the specified groups. + *
+     * WARNING: This method should be used sparingly, as it could have performance issues for Groups with very large
+     * lists of members. In that situation, a very large number of EPerson objects will be loaded into memory.
+     * See https://github.com/DSpace/DSpace/issues/9052
+     *
    + * For better performance, use the paginated version of this method. * * @param c The relevant DSpace Context. * @param groups set of eperson groups * @return a list of epeople * @throws SQLException An exception that provides information on a database access error or other errors. */ - public List findByGroups(Context c, Set groups) throws SQLException; + List findByGroups(Context c, Set groups) throws SQLException; + + /** + * Retrieve all EPerson accounts which belong to at least one of the specified groups, in a paginated fashion. + * + * @param c The relevant DSpace Context. + * @param groups Set of group(s) to check membership in + * @param pageSize number of EPerson objects to load at one time. Set to <=0 to disable pagination + * @param offset number of page to load (starting with 1). Set to <=0 to disable pagination + * @return a list of epeople + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + List findByGroups(Context c, Set groups, int pageSize, int offset) throws SQLException; + + /** + * Count all EPerson accounts which belong to at least one of the specified groups. This provides the total + * number of results to expect from corresponding findByGroups() for pagination purposes. + * + * @param c The relevant DSpace Context. + * @param groups Set of group(s) to check membership in + * @return total number of (unique) EPersons who are a member of one or more groups. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + int countByGroups(Context c, Set groups) throws SQLException; /** * Retrieve all accounts which are subscribed to receive information about new items. diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupEPersonLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupEPersonLinkRepository.java index b1cdc401f22f..1ce278893d17 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupEPersonLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupEPersonLinkRepository.java @@ -8,6 +8,8 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.List; +import java.util.Set; import java.util.UUID; import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; @@ -15,7 +17,9 @@ import org.dspace.app.rest.model.GroupRest; import org.dspace.app.rest.projection.Projection; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; @@ -31,6 +35,9 @@ public class GroupEPersonLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { + @Autowired + EPersonService epersonService; + @Autowired GroupService groupService; @@ -45,7 +52,11 @@ public Page getMembers(@Nullable HttpServletRequest request, if (group == null) { throw new ResourceNotFoundException("No such group: " + groupId); } - return converter.toRestPage(group.getMembers(), optionalPageable, projection); + int total = epersonService.countByGroups(context, Set.of(group)); + Pageable pageable = utils.getPageable(optionalPageable); + List members = epersonService.findByGroups(context, Set.of(group), pageable.getPageSize(), + 
Math.toIntExact(pageable.getOffset())); + return converter.toRestPage(members, pageable, total, projection); } catch (SQLException e) { throw new RuntimeException(e); } From 8b748af944fd6b3888cfa1a5fa0f1b05d7215213 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Thu, 14 Sep 2023 16:26:09 -0500 Subject: [PATCH 480/686] Bug fix. Only use pageSize and offset if >0 (cherry picked from commit 15de2d0074b56f421b3bbb9f3955814497985aef) --- .../org/dspace/eperson/dao/impl/EPersonDAOImpl.java | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java index 14b44d77c0cc..bd68a7f399d9 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java @@ -112,7 +112,8 @@ public List findAll(Context context, MetadataField metadataSortField, S } @Override - public List findByGroups(Context context, Set groups, int pageSize, int offset) throws SQLException { + public List findByGroups(Context context, Set groups, int pageSize, int offset) + throws SQLException { Query query = createQuery(context, "SELECT DISTINCT e FROM EPerson e " + "JOIN e.groups g " + @@ -122,10 +123,16 @@ public List findByGroups(Context context, Set groups, int pageSi for (Group group : groups) { idList.add(group.getID()); } - query.setParameter("idList", idList); - return list(query, pageSize, offset); + if (pageSize > 0) { + query.setMaxResults(pageSize); + } + if (offset > 0) { + query.setFirstResult(offset); + } + + return list(query); } @Override From a4db6d39d70013858204d6a376bd319d124b4f34 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Thu, 14 Sep 2023 16:33:59 -0500 Subject: [PATCH 481/686] Add missing pagination test for /groups/[uuid]/epersons (cherry picked from commit 457dd9ae441fa084ff7cc3eaf9213e5497a2b298) --- .../app/rest/GroupRestRepositoryIT.java | 78 +++++++++++++++++++ 1 file changed, 78 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java index fda8b15effa3..4d68652e249a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java @@ -3091,6 +3091,84 @@ public void findByMetadataPaginationTest() throws Exception { } + // Test of /groups/[uuid]/epersons pagination + @Test + public void epersonMemberPaginationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + EPerson eperson1 = EPersonBuilder.createEPerson(context) + .withEmail("test1@example.com") + .withNameInMetadata("Test1", "User") + .build(); + EPerson eperson2 = EPersonBuilder.createEPerson(context) + .withEmail("test2@example.com") + .withNameInMetadata("Test2", "User") + .build(); + EPerson eperson3 = EPersonBuilder.createEPerson(context) + .withEmail("test3@example.com") + .withNameInMetadata("Test3", "User") + .build(); + EPerson eperson4 = EPersonBuilder.createEPerson(context) + .withEmail("test4@example.com") + .withNameInMetadata("Test4", "User") + .build(); + EPerson eperson5 = EPersonBuilder.createEPerson(context) + .withEmail("test5@example.com") + .withNameInMetadata("Test5", "User") + .build(); + + Group group = GroupBuilder.createGroup(context) + .withName("Test group") + .addMember(eperson1) + 
.addMember(eperson2) + .addMember(eperson3) + .addMember(eperson4) + .addMember(eperson5) + .build(); + + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/epersons") + .param("page", "0") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(0))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/epersons") + .param("page", "1") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(1))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/epersons") + .param("page", "2") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(1))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(2))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + } + @Test public void commAdminAndColAdminCannotExploitItemReadGroupTest() throws Exception { From 22fb4508ee72c295174868f7bd91c8b85b3e2d05 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 15 Sep 2023 16:56:18 -0500 Subject: [PATCH 482/686] Add pagination to /groups/[uuid]/subgroups endpoint, along with tests (cherry picked from commit e7c4b9eba2d8148e07543c3b6c61dde359018da2) --- .../main/java/org/dspace/eperson/Group.java | 14 +++- .../org/dspace/eperson/GroupServiceImpl.java | 16 ++++ .../java/org/dspace/eperson/dao/GroupDAO.java | 24 ++++++ .../dspace/eperson/dao/impl/GroupDAOImpl.java | 23 ++++++ .../eperson/service/EPersonService.java | 5 +- .../dspace/eperson/service/GroupService.java | 25 +++++++ .../java/org/dspace/eperson/GroupTest.java | 27 +++++++ .../repository/GroupGroupLinkRepository.java | 7 +- .../app/rest/GroupRestRepositoryIT.java | 73 +++++++++++++++++++ 9 files changed, 207 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/Group.java b/dspace-api/src/main/java/org/dspace/eperson/Group.java index 6cb534146b25..67655e0e0aaf 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/Group.java +++ b/dspace-api/src/main/java/org/dspace/eperson/Group.java @@ -98,7 +98,11 @@ void addMember(EPerson e) { } /** - * Return EPerson members of a Group + * Return EPerson members of a Group. + *
+     * WARNING: This method may have bad performance for Groups with large numbers of EPerson members.
+     * Therefore, only use this when you need to access every EPerson member. Instead, consider using
+     * EPersonService.findByGroups() for a paginated list of EPersons.
      *
      * @return list of EPersons
      */
@@ -143,9 +147,13 @@ List<Group> getParentGroups() {
     }
 
     /**
-     * Return Group members of a Group.
+     * Return Group members (i.e. direct subgroups) of a Group.
+     *
    + * WARNING: This method may have bad performance for Groups with large numbers of Subgroups. + * Therefore, only use this when you need to access every Subgroup. Instead, consider using + * GroupService.findByParent() for a paginated list of Subgroups. * - * @return list of groups + * @return list of subgroups */ public List getMemberGroups() { return groups; diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index 607e57af0b2c..4fdd1a3ba384 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -829,4 +829,20 @@ public List findByMetadataField(final Context context, final String searc public String getName(Group dso) { return dso.getName(); } + + @Override + public List findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException { + if (parent == null) { + return null; + } + return groupDAO.findByParent(context, parent, pageSize, offset); + } + + @Override + public int countByParent(Context context, Group parent) throws SQLException { + if (parent == null) { + return 0; + } + return groupDAO.countByParent(context, parent); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java index 2cc77129f038..fd56fe9bd1d0 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java @@ -146,4 +146,28 @@ List findAll(Context context, List metadataSortFields, int */ Group findByIdAndMembership(Context context, UUID id, EPerson ePerson) throws SQLException; + /** + * Find all groups which are members of a given parent group. + * This provides the same behavior as group.getMemberGroups(), but in a paginated fashion. + * + * @param context The DSpace context + * @param parent Parent Group to search within + * @param pageSize how many results return + * @param offset the position of the first result to return + * @return Groups matching the query + * @throws SQLException if database error + */ + List findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException; + + /** + * Returns the number of groups which are members of a given parent group. + * This provides the same behavior as group.getMemberGroups().size(), but with better performance for large groups. + * This method may be used with findByParent() to perform pagination. 
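To make that pairing concrete, a minimal service-level sketch (GroupService gains matching findByParent()/countByParent() methods later in this same commit); a valid Context is assumed, and the lookup of the parent group by name as well as the page size of 50 are illustrative only:

import java.util.List;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;

public class PagedSubgroupsSketch {
    // List the direct subgroups of a parent group one page at a time.
    public static void printSubgroups(Context context, String parentGroupName) throws Exception {
        GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
        Group parent = groupService.findByName(context, parentGroupName); // illustrative lookup
        int pageSize = 50;                                                // arbitrary example value
        int total = groupService.countByParent(context, parent);
        for (int offset = 0; offset < total; offset += pageSize) {
            List<Group> page = groupService.findByParent(context, parent, pageSize, offset);
            page.forEach(child -> System.out.println(child.getName()));
        }
    }
}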
+ * + * @param context The DSpace context + * @param parent Parent Group to search within + * @return Number of Groups matching the query + * @throws SQLException if database error + */ + int countByParent(Context context, Group parent) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java index edc2ab749bfa..f071a1bc754b 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java @@ -196,4 +196,27 @@ public int countRows(Context context) throws SQLException { return count(createQuery(context, "SELECT count(*) FROM Group")); } + @Override + public List findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException { + Query query = createQuery(context, + "from Group where (from Group g where g.id = :parent_id) in elements (parentGroups)"); + query.setParameter("parent_id", parent.getID()); + if (pageSize > 0) { + query.setMaxResults(pageSize); + } + if (offset > 0) { + query.setFirstResult(offset); + } + query.setHint("org.hibernate.cacheable", Boolean.TRUE); + + return list(query); + } + + public int countByParent(Context context, Group parent) throws SQLException { + Query query = createQuery(context, "SELECT count(*) from Group " + + "where (from Group g where g.id = :parent_id) in elements (parentGroups)"); + query.setParameter("parent_id", parent.getID()); + + return count(query); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java index b60247ef5498..5b10ea539b33 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java @@ -254,9 +254,8 @@ public EPerson create(Context context) throws SQLException, /** * Retrieve all EPerson accounts which belong to at least one of the specified groups. *
-     * WARNING: This method should be used sparingly, as it could have performance issues for Groups with very large
-     * lists of members. In that situation, a very large number of EPerson objects will be loaded into memory.
-     * See https://github.com/DSpace/DSpace/issues/9052
+     * WARNING: This method may have bad performance issues for Groups with a very large number of members,
+     * as it will load all member EPerson objects into memory.
+     *
    * For better performance, use the paginated version of this method. * diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java index 8979bcc4457a..634fd0aca20c 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java @@ -327,4 +327,29 @@ public List findAll(Context context, List metadataSortFiel */ List findByMetadataField(Context context, String searchValue, MetadataField metadataField) throws SQLException; + + /** + * Find all groups which are a member of the given Parent group + * + * @param context The relevant DSpace Context. + * @param parent The parent Group to search on + * @param pageSize how many results return + * @param offset the position of the first result to return + * @return List of all groups which are members of the parent group + * @throws SQLException database exception if error + */ + List findByParent(Context context, Group parent, int pageSize, int offset) + throws SQLException; + + /** + * Return number of groups which are a member of the given Parent group. + * Can be used with findByParent() for pagination of all groups within a given Parent group. + * + * @param context The relevant DSpace Context. + * @param parent The parent Group to search on + * @return number of groups which are members of the parent group + * @throws SQLException database exception if error + */ + int countByParent(Context context, Group parent) + throws SQLException; } diff --git a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java index ee9c883f1be6..7666fcfe5431 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java @@ -10,6 +10,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -24,6 +25,7 @@ import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.GroupBuilder; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; @@ -620,6 +622,31 @@ public void isEmpty() throws SQLException, AuthorizeException, EPersonDeletionEx assertTrue(groupService.isEmpty(level2Group)); } + @Test + public void findAndCountByParent() throws SQLException, AuthorizeException, IOException { + // Create a parent group with 3 child groups + Group parentGroup = createGroup("parentGroup"); + Group childGroup = createGroup("childGroup"); + Group child2Group = createGroup("child2Group"); + Group child3Group = createGroup("child3Group"); + groupService.addMember(context, parentGroup, childGroup); + groupService.addMember(context, parentGroup, child2Group); + groupService.addMember(context, parentGroup, child3Group); + groupService.update(context, parentGroup); + + // Assert that findByParent is the same list of groups as getMemberGroups() when pagination is ignored + // (NOTE: Pagination is tested in GroupRestRepositoryIT) + assertEquals(parentGroup.getMemberGroups(), groupService.findByParent(context, 
parentGroup, -1, -1)); + // Assert countBy parent is the same as the size of group members + assertEquals(parentGroup.getMemberGroups().size(), groupService.countByParent(context, parentGroup)); + + // Clean up our data + groupService.delete(context, parentGroup); + groupService.delete(context, childGroup); + groupService.delete(context, child2Group); + groupService.delete(context, child3Group); + } + protected Group createGroup(String name) throws SQLException, AuthorizeException { context.turnOffAuthorisationSystem(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupGroupLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupGroupLinkRepository.java index 37cf9083b39a..564e941d45cc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupGroupLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupGroupLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.List; import java.util.UUID; import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; @@ -45,7 +46,11 @@ public Page getGroups(@Nullable HttpServletRequest request, if (group == null) { throw new ResourceNotFoundException("No such group: " + groupId); } - return converter.toRestPage(group.getMemberGroups(), optionalPageable, projection); + int total = groupService.countByParent(context, group); + Pageable pageable = utils.getPageable(optionalPageable); + List memberGroups = groupService.findByParent(context, group, pageable.getPageSize(), + Math.toIntExact(pageable.getOffset())); + return converter.toRestPage(memberGroups, pageable, total, projection); } catch (SQLException e) { throw new RuntimeException(e); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java index 4d68652e249a..797657794a6e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java @@ -3169,6 +3169,79 @@ public void epersonMemberPaginationTest() throws Exception { .andExpect(jsonPath("$.page.totalElements", is(5))); } + // Test of /groups/[uuid]/subgroups pagination + @Test + public void subgroupPaginationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context) + .withName("Test group") + .build(); + + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 1") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 2") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 3") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 4") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test subgroup 5") + .build(); + + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/subgroups") + .param("page", "0") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.subgroups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + 
.andExpect(jsonPath("$._embedded.subgroups").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(0))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/subgroups") + .param("page", "1") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.subgroups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.subgroups").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(1))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/" + group.getID() + "/subgroups") + .param("page", "2") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.subgroups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.subgroups").value(Matchers.hasSize(1))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(2))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + } + @Test public void commAdminAndColAdminCannotExploitItemReadGroupTest() throws Exception { From e48b21637a180f858ff56887a5cb37631a73c97f Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 15 Sep 2023 17:08:03 -0500 Subject: [PATCH 483/686] Add basic unit test for new EpersonService methods (cherry picked from commit c000e54116498030261d988f87a496beef7d21d1) --- .../java/org/dspace/eperson/EPersonTest.java | 65 +++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java index b98db573566d..07f0fa4cd5ae 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java @@ -16,6 +16,7 @@ import java.sql.SQLException; import java.util.Iterator; import java.util.List; +import java.util.Set; import javax.mail.MessagingException; import org.apache.commons.codec.DecoderException; @@ -1029,6 +1030,42 @@ public void testCascadingDeleteSubmitterPreservesWorkflowItems() wfi.getSubmitter()); } + @Test + public void findAndCountByGroups() throws SQLException, AuthorizeException, IOException { + // Create a group with 3 EPerson members + Group group = createGroup("parentGroup"); + EPerson eperson1 = createEPersonAndAddToGroup("test1@example.com", group); + EPerson eperson2 = createEPersonAndAddToGroup("test2@example.com", group); + EPerson eperson3 = createEPersonAndAddToGroup("test3@example.com", group); + groupService.update(context, group); + + // Assert that findByGroup is the same list of EPersons as getMembers() when pagination is ignored + // (NOTE: Pagination is tested in GroupRestRepositoryIT) + assertEquals(group.getMembers(), ePersonService.findByGroups(context, Set.of(group), -1, -1)); + // Assert countByGroups is the same as the size of members + assertEquals(group.getMembers().size(), ePersonService.countByGroups(context, Set.of(group))); + + // Add another group with duplicate EPerson + Group group2 = createGroup("anotherGroup"); + 
groupService.addMember(context, group2, eperson1); + groupService.update(context, group2); + + // Verify countByGroups is still 3 (existing person should not be counted twice) + assertEquals(3, ePersonService.countByGroups(context, Set.of(group, group2))); + + // Add a new EPerson to new group, verify count goes up by one + EPerson eperson4 = createEPersonAndAddToGroup("test4@example.com", group2); + assertEquals(4, ePersonService.countByGroups(context, Set.of(group, group2))); + + // Clean up our data + groupService.delete(context, group); + groupService.delete(context, group2); + ePersonService.delete(context, eperson1); + ePersonService.delete(context, eperson2); + ePersonService.delete(context, eperson3); + ePersonService.delete(context, eperson4); + } + /** * Creates an item, sets the specified submitter. * @@ -1075,4 +1112,32 @@ private WorkspaceItem prepareWorkspaceItem(EPerson submitter) context.restoreAuthSystemState(); return wsi; } + + protected Group createGroup(String name) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + Group group = groupService.create(context); + group.setName(name); + groupService.update(context, group); + context.restoreAuthSystemState(); + return group; + } + + protected EPerson createEPersonAndAddToGroup(String email, Group group) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = createEPerson(email); + groupService.addMember(context, group, ePerson); + groupService.update(context, group); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } + + protected EPerson createEPerson(String email) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = ePersonService.create(context); + ePerson.setEmail(email); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } } From 7aca4df6ef8502abefe68738270c11692d77de15 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 18 Sep 2023 09:58:59 -0500 Subject: [PATCH 484/686] Minor unit test fix. 
Use isEqualCollection to compare list with Hibernate results (cherry picked from commit cdb68a6fdc925fcbb76f9265e64771497b3f78bc) --- .../src/test/java/org/dspace/eperson/EPersonTest.java | 9 ++++++++- .../src/test/java/org/dspace/eperson/GroupTest.java | 9 +++++++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java index 07f0fa4cd5ae..fb62edec0931 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java @@ -10,6 +10,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; @@ -20,6 +21,7 @@ import javax.mail.MessagingException; import org.apache.commons.codec.DecoderException; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; @@ -1041,7 +1043,10 @@ public void findAndCountByGroups() throws SQLException, AuthorizeException, IOEx // Assert that findByGroup is the same list of EPersons as getMembers() when pagination is ignored // (NOTE: Pagination is tested in GroupRestRepositoryIT) - assertEquals(group.getMembers(), ePersonService.findByGroups(context, Set.of(group), -1, -1)); + // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be compared + // directly to a List. See https://stackoverflow.com/a/57399383/3750035 + assertTrue(CollectionUtils.isEqualCollection(group.getMembers(), + ePersonService.findByGroups(context, Set.of(group), -1, -1))); // Assert countByGroups is the same as the size of members assertEquals(group.getMembers().size(), ePersonService.countByGroups(context, Set.of(group))); @@ -1058,12 +1063,14 @@ public void findAndCountByGroups() throws SQLException, AuthorizeException, IOEx assertEquals(4, ePersonService.countByGroups(context, Set.of(group, group2))); // Clean up our data + context.turnOffAuthorisationSystem(); groupService.delete(context, group); groupService.delete(context, group2); ePersonService.delete(context, eperson1); ePersonService.delete(context, eperson2); ePersonService.delete(context, eperson3); ePersonService.delete(context, eperson4); + context.restoreAuthSystemState(); } /** diff --git a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java index 7666fcfe5431..a056c8061e54 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java @@ -22,10 +22,10 @@ import java.util.Collections; import java.util.List; +import org.apache.commons.collections4.CollectionUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; -import org.dspace.builder.GroupBuilder; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; @@ -636,15 +636,20 @@ public void findAndCountByParent() throws SQLException, AuthorizeException, IOEx // Assert that findByParent is the same list of groups as getMemberGroups() when pagination is ignored // (NOTE: Pagination is tested in GroupRestRepositoryIT) - 
assertEquals(parentGroup.getMemberGroups(), groupService.findByParent(context, parentGroup, -1, -1)); + // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be compared + // directly to a List. See https://stackoverflow.com/a/57399383/3750035 + assertTrue(CollectionUtils.isEqualCollection(parentGroup.getMemberGroups(), + groupService.findByParent(context, parentGroup, -1, -1))); // Assert countBy parent is the same as the size of group members assertEquals(parentGroup.getMemberGroups().size(), groupService.countByParent(context, parentGroup)); // Clean up our data + context.turnOffAuthorisationSystem(); groupService.delete(context, parentGroup); groupService.delete(context, childGroup); groupService.delete(context, child2Group); groupService.delete(context, child3Group); + context.restoreAuthSystemState(); } From 8b1a0c1daeebaf28944b20667c3318323bc11644 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 18 Sep 2023 10:27:58 -0500 Subject: [PATCH 485/686] Add countAllMembers() with tests. Update tests to use try/catch (cherry picked from commit 58a15b72975940d48ae450e6b46557b4443f2978) --- .../org/dspace/eperson/GroupServiceImpl.java | 15 ++++ .../dspace/eperson/service/GroupService.java | 18 ++++- .../java/org/dspace/eperson/EPersonTest.java | 70 +++++++++++-------- .../java/org/dspace/eperson/GroupTest.java | 60 +++++++++++----- 4 files changed, 115 insertions(+), 48 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index 4fdd1a3ba384..faf7b2b52a5e 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -381,6 +381,21 @@ public List allMembers(Context c, Group g) throws SQLException { return new ArrayList<>(childGroupChildren); } + @Override + public int countAllMembers(Context context, Group group) throws SQLException { + // Get all groups which are a member of this group + List group2GroupCaches = group2GroupCacheDAO.findByParent(context, group); + Set groups = new HashSet<>(); + for (Group2GroupCache group2GroupCache : group2GroupCaches) { + groups.add(group2GroupCache.getChild()); + } + // Append current group as well + groups.add(group); + + // Return total number of unique EPerson objects in any of these groups + return ePersonService.countByGroups(context, groups); + } + @Override public Group find(Context context, UUID id) throws SQLException { if (id == null) { diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java index 634fd0aca20c..ef3949149f14 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java @@ -189,9 +189,11 @@ public interface GroupService extends DSpaceObjectService, DSpaceObjectLe Set allMemberGroupsSet(Context context, EPerson ePerson) throws SQLException; /** - * Get all of the epeople who are a member of the - * specified group, or a member of a sub-group of the + * Get all of the EPerson objects who are a member of the specified group, or a member of a subgroup of the * specified group, etc. + *
    + * WARNING: This method may have bad performance for Groups with a very large number of members, as it will load + * all member EPerson objects into memory. Only use if you need access to *every* EPerson object at once. * * @param context The relevant DSpace Context. * @param group Group object @@ -200,6 +202,18 @@ public interface GroupService extends DSpaceObjectService, DSpaceObjectLe */ public List allMembers(Context context, Group group) throws SQLException; + /** + * Count all of the EPerson objects who are a member of the specified group, or a member of a subgroup of the + * specified group, etc. + * In other words, this will return the size of "allMembers()" without having to load all EPerson objects into + * memory. + * @param context current DSpace context + * @param group Group object + * @return count of EPerson object members + * @throws SQLException if error + */ + int countAllMembers(Context context, Group group) throws SQLException; + /** * Find the group by its name - assumes name is unique * diff --git a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java index fb62edec0931..6c162c30d1ad 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java @@ -1041,36 +1041,46 @@ public void findAndCountByGroups() throws SQLException, AuthorizeException, IOEx EPerson eperson3 = createEPersonAndAddToGroup("test3@example.com", group); groupService.update(context, group); - // Assert that findByGroup is the same list of EPersons as getMembers() when pagination is ignored - // (NOTE: Pagination is tested in GroupRestRepositoryIT) - // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be compared - // directly to a List. See https://stackoverflow.com/a/57399383/3750035 - assertTrue(CollectionUtils.isEqualCollection(group.getMembers(), - ePersonService.findByGroups(context, Set.of(group), -1, -1))); - // Assert countByGroups is the same as the size of members - assertEquals(group.getMembers().size(), ePersonService.countByGroups(context, Set.of(group))); - - // Add another group with duplicate EPerson - Group group2 = createGroup("anotherGroup"); - groupService.addMember(context, group2, eperson1); - groupService.update(context, group2); - - // Verify countByGroups is still 3 (existing person should not be counted twice) - assertEquals(3, ePersonService.countByGroups(context, Set.of(group, group2))); - - // Add a new EPerson to new group, verify count goes up by one - EPerson eperson4 = createEPersonAndAddToGroup("test4@example.com", group2); - assertEquals(4, ePersonService.countByGroups(context, Set.of(group, group2))); - - // Clean up our data - context.turnOffAuthorisationSystem(); - groupService.delete(context, group); - groupService.delete(context, group2); - ePersonService.delete(context, eperson1); - ePersonService.delete(context, eperson2); - ePersonService.delete(context, eperson3); - ePersonService.delete(context, eperson4); - context.restoreAuthSystemState(); + Group group2 = null; + EPerson eperson4 = null; + + try { + // Assert that findByGroup is the same list of EPersons as getMembers() when pagination is ignored + // (NOTE: Pagination is tested in GroupRestRepositoryIT) + // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be + // compared directly to a List. 
See https://stackoverflow.com/a/57399383/3750035 + assertTrue( + CollectionUtils.isEqualCollection(group.getMembers(), + ePersonService.findByGroups(context, Set.of(group), -1, -1))); + // Assert countByGroups is the same as the size of members + assertEquals(group.getMembers().size(), ePersonService.countByGroups(context, Set.of(group))); + + // Add another group with duplicate EPerson + group2 = createGroup("anotherGroup"); + groupService.addMember(context, group2, eperson1); + groupService.update(context, group2); + + // Verify countByGroups is still 3 (existing person should not be counted twice) + assertEquals(3, ePersonService.countByGroups(context, Set.of(group, group2))); + + // Add a new EPerson to new group, verify count goes up by one + eperson4 = createEPersonAndAddToGroup("test4@example.com", group2); + assertEquals(4, ePersonService.countByGroups(context, Set.of(group, group2))); + } finally { + // Clean up our data + context.turnOffAuthorisationSystem(); + groupService.delete(context, group); + if (group2 != null) { + groupService.delete(context, group2); + } + ePersonService.delete(context, eperson1); + ePersonService.delete(context, eperson2); + ePersonService.delete(context, eperson3); + if (eperson4 != null) { + ePersonService.delete(context, eperson4); + } + context.restoreAuthSystemState(); + } } /** diff --git a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java index a056c8061e54..0eaacb6194ed 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java @@ -606,6 +606,30 @@ public void allMembers() throws SQLException, AuthorizeException, EPersonDeletio } } + @Test + public void countAllMembers() throws SQLException, AuthorizeException, EPersonDeletionException, IOException { + List allEPeopleAdded = new ArrayList<>(); + try { + context.turnOffAuthorisationSystem(); + allEPeopleAdded.add(createEPersonAndAddToGroup("allMemberGroups1@dspace.org", topGroup)); + allEPeopleAdded.add(createEPersonAndAddToGroup("allMemberGroups2@dspace.org", level1Group)); + allEPeopleAdded.add(createEPersonAndAddToGroup("allMemberGroups3@dspace.org", level2Group)); + context.restoreAuthSystemState(); + + assertEquals(3, groupService.countAllMembers(context, topGroup)); + assertEquals(2, groupService.countAllMembers(context, level1Group)); + assertEquals(1, groupService.countAllMembers(context, level2Group)); + } finally { + // Remove all the people added (in order to not impact other tests) + context.turnOffAuthorisationSystem(); + for (EPerson ePerson : allEPeopleAdded) { + ePersonService.delete(context, ePerson); + } + context.restoreAuthSystemState(); + } + } + + @Test public void isEmpty() throws SQLException, AuthorizeException, EPersonDeletionException, IOException { assertTrue(groupService.isEmpty(topGroup)); @@ -624,6 +648,7 @@ public void isEmpty() throws SQLException, AuthorizeException, EPersonDeletionEx @Test public void findAndCountByParent() throws SQLException, AuthorizeException, IOException { + // Create a parent group with 3 child groups Group parentGroup = createGroup("parentGroup"); Group childGroup = createGroup("childGroup"); @@ -634,22 +659,25 @@ public void findAndCountByParent() throws SQLException, AuthorizeException, IOEx groupService.addMember(context, parentGroup, child3Group); groupService.update(context, parentGroup); - // Assert that findByParent is the same list of groups as getMemberGroups() when pagination is 
ignored - // (NOTE: Pagination is tested in GroupRestRepositoryIT) - // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be compared - // directly to a List. See https://stackoverflow.com/a/57399383/3750035 - assertTrue(CollectionUtils.isEqualCollection(parentGroup.getMemberGroups(), - groupService.findByParent(context, parentGroup, -1, -1))); - // Assert countBy parent is the same as the size of group members - assertEquals(parentGroup.getMemberGroups().size(), groupService.countByParent(context, parentGroup)); - - // Clean up our data - context.turnOffAuthorisationSystem(); - groupService.delete(context, parentGroup); - groupService.delete(context, childGroup); - groupService.delete(context, child2Group); - groupService.delete(context, child3Group); - context.restoreAuthSystemState(); + try { + // Assert that findByParent is the same list of groups as getMemberGroups() when pagination is ignored + // (NOTE: Pagination is tested in GroupRestRepositoryIT) + // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be + // compared directly to a List. See https://stackoverflow.com/a/57399383/3750035 + assertTrue( + CollectionUtils.isEqualCollection(parentGroup.getMemberGroups(), + groupService.findByParent(context, parentGroup, -1, -1))); + // Assert countBy parent is the same as the size of group members + assertEquals(parentGroup.getMemberGroups().size(), groupService.countByParent(context, parentGroup)); + } finally { + // Clean up our data + context.turnOffAuthorisationSystem(); + groupService.delete(context, parentGroup); + groupService.delete(context, childGroup); + groupService.delete(context, child2Group); + groupService.delete(context, child3Group); + context.restoreAuthSystemState(); + } } From a723018e9a656e0c4aa86094e23ce0e15baac261 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 18 Sep 2023 10:58:18 -0500 Subject: [PATCH 486/686] Replace several usages of allMembers() with count methods to avoid performance issues (cherry picked from commit 2c9165afb08126189ee3367347e7011f89227b7c) --- .../dspace/eperson/EPersonServiceImpl.java | 7 +++++-- .../org/dspace/eperson/GroupServiceImpl.java | 19 ++++++++++++------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index 5f17051dbbb0..ce117282de32 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -305,10 +305,13 @@ public void delete(Context context, EPerson ePerson, boolean cascade) throw new AuthorizeException( "You must be an admin to delete an EPerson"); } + // Get all workflow-related groups that the current EPerson belongs to Set workFlowGroups = getAllWorkFlowGroups(context, ePerson); for (Group group: workFlowGroups) { - List ePeople = groupService.allMembers(context, group); - if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + // Get total number of unique EPerson objs who are a member of this group (or subgroup) + int totalMembers = groupService.countAllMembers(context, group); + // If only one EPerson is a member, then we cannot delete the last member of this group. 
+ if (totalMembers == 1) { throw new EmptyWorkflowGroupException(ePerson.getID(), group.getID()); } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index faf7b2b52a5e..d5d7ebcec11f 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -179,8 +179,10 @@ public void removeMember(Context context, Group group, EPerson ePerson) throws S for (CollectionRole collectionRole : collectionRoles) { if (StringUtils.equals(collectionRole.getRoleId(), role.getId()) && claimedTask.getWorkflowItem().getCollection() == collectionRole.getCollection()) { - List ePeople = allMembers(context, group); - if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + // Get total number of unique EPerson objs who are a member of this group (or subgroup) + int totalMembers = countAllMembers(context, group); + // If only one EPerson is a member, then we cannot delete the last member of this group. + if (totalMembers == 1) { throw new IllegalStateException( "Refused to remove user " + ePerson .getID() + " from workflow group because the group " + group @@ -191,8 +193,10 @@ public void removeMember(Context context, Group group, EPerson ePerson) throws S } } if (!poolTasks.isEmpty()) { - List ePeople = allMembers(context, group); - if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + // Get total number of unique EPerson objs who are a member of this group (or subgroup) + int totalMembers = countAllMembers(context, group); + // If only one EPerson is a member, then we cannot delete the last member of this group. + if (totalMembers == 1) { throw new IllegalStateException( "Refused to remove user " + ePerson .getID() + " from workflow group because the group " + group @@ -212,9 +216,10 @@ public void removeMember(Context context, Group groupParent, Group childGroup) t if (!collectionRoles.isEmpty()) { List poolTasks = poolTaskService.findByGroup(context, groupParent); if (!poolTasks.isEmpty()) { - List parentPeople = allMembers(context, groupParent); - List childPeople = allMembers(context, childGroup); - if (childPeople.containsAll(parentPeople)) { + // Count number of Groups which have this groupParent as a direct parent + int totalChildren = countByParent(context, groupParent); + // If only one group has this as a parent, we cannot delete the last child group + if (totalChildren == 1) { throw new IllegalStateException( "Refused to remove sub group " + childGroup .getID() + " from workflow group because the group " + groupParent From 3caa8b26dca4432a362093361d1b4ff7419b3a35 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 20 Sep 2023 15:15:44 -0500 Subject: [PATCH 487/686] Fix bug in logic for determining whether a workflow group will be left empty. Need to check *both* EPerson and subgroup counts. 
(cherry picked from commit 9832259aa06d9fe140407ed54c4687989e98f7b2) --- .../org/dspace/eperson/GroupServiceImpl.java | 31 ++++++++++++------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index d5d7ebcec11f..20d29734cbaa 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -179,10 +179,13 @@ public void removeMember(Context context, Group group, EPerson ePerson) throws S for (CollectionRole collectionRole : collectionRoles) { if (StringUtils.equals(collectionRole.getRoleId(), role.getId()) && claimedTask.getWorkflowItem().getCollection() == collectionRole.getCollection()) { - // Get total number of unique EPerson objs who are a member of this group (or subgroup) - int totalMembers = countAllMembers(context, group); - // If only one EPerson is a member, then we cannot delete the last member of this group. - if (totalMembers == 1) { + // Count number of EPersons who are *direct* members of this group + int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(group)); + // Count number of Groups which have this groupParent as a direct parent + int totalChildGroups = countByParent(context, group); + // If this group has only one direct EPerson and *zero* child groups, then we cannot delete the + // EPerson or we will leave this group empty. + if (totalDirectEPersons == 1 && totalChildGroups == 0) { throw new IllegalStateException( "Refused to remove user " + ePerson .getID() + " from workflow group because the group " + group @@ -193,10 +196,13 @@ public void removeMember(Context context, Group group, EPerson ePerson) throws S } } if (!poolTasks.isEmpty()) { - // Get total number of unique EPerson objs who are a member of this group (or subgroup) - int totalMembers = countAllMembers(context, group); - // If only one EPerson is a member, then we cannot delete the last member of this group. - if (totalMembers == 1) { + // Count number of EPersons who are *direct* members of this group + int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(group)); + // Count number of Groups which have this groupParent as a direct parent + int totalChildGroups = countByParent(context, group); + // If this group has only one direct EPerson and *zero* child groups, then we cannot delete the + // EPerson or we will leave this group empty. + if (totalDirectEPersons == 1 && totalChildGroups == 0) { throw new IllegalStateException( "Refused to remove user " + ePerson .getID() + " from workflow group because the group " + group @@ -217,9 +223,12 @@ public void removeMember(Context context, Group groupParent, Group childGroup) t List poolTasks = poolTaskService.findByGroup(context, groupParent); if (!poolTasks.isEmpty()) { // Count number of Groups which have this groupParent as a direct parent - int totalChildren = countByParent(context, groupParent); - // If only one group has this as a parent, we cannot delete the last child group - if (totalChildren == 1) { + int totalChildGroups = countByParent(context, groupParent); + // Count number of EPersons who are *direct* members of this group + int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(groupParent)); + // If this group has only one childGroup and *zero* direct EPersons, then we cannot delete the + // childGroup or we will leave this group empty. 
+ if (totalChildGroups == 1 && totalDirectEPersons == 0) { throw new IllegalStateException( "Refused to remove sub group " + childGroup .getID() + " from workflow group because the group " + groupParent From f6003164c8042cd7b7296a298fcadeec5c63ab3c Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 6 Oct 2023 13:25:47 -0500 Subject: [PATCH 488/686] Use join instead of subquery as join seems slightly faster. (cherry picked from commit 9c0bf08cf4c3ab7e941ebe1bae66cf2aea720697) --- .../java/org/dspace/eperson/dao/impl/GroupDAOImpl.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java index f071a1bc754b..ad9c7b54fdb5 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java @@ -199,7 +199,8 @@ public int countRows(Context context) throws SQLException { @Override public List findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException { Query query = createQuery(context, - "from Group where (from Group g where g.id = :parent_id) in elements (parentGroups)"); + "SELECT g FROM Group g JOIN g.parentGroups pg " + + "WHERE pg.id = :parent_id"); query.setParameter("parent_id", parent.getID()); if (pageSize > 0) { query.setMaxResults(pageSize); @@ -213,8 +214,8 @@ public List findByParent(Context context, Group parent, int pageSize, int } public int countByParent(Context context, Group parent) throws SQLException { - Query query = createQuery(context, "SELECT count(*) from Group " + - "where (from Group g where g.id = :parent_id) in elements (parentGroups)"); + Query query = createQuery(context, "SELECT count(g) FROM Group g JOIN g.parentGroups pg " + + "WHERE pg.id = :parent_id"); query.setParameter("parent_id", parent.getID()); return count(query); From 6e63e63039314c592bb5f16e2e7bd11169a92087 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 1 Nov 2023 11:16:09 -0500 Subject: [PATCH 489/686] Address feedback. Initialize HashSet sizes to avoid resizing. Correct comment about indeterminante ordering. (cherry picked from commit f011a5a5dbcd2def47dde7830981cf282ca660aa) --- .../src/main/java/org/dspace/eperson/GroupServiceImpl.java | 7 +++++-- .../src/main/java/org/dspace/eperson/dao/EPersonDAO.java | 4 ++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index 20d29734cbaa..c2f2ea68bdd7 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -382,7 +382,8 @@ public List allMembers(Context c, Group g) throws SQLException { // Get all groups which are a member of this group List group2GroupCaches = group2GroupCacheDAO.findByParent(c, g); - Set groups = new HashSet<>(); + // Initialize HashSet based on List size to avoid Set resizing. 
See https://stackoverflow.com/a/21822273 + Set groups = new HashSet<>((int) (group2GroupCaches.size() / 0.75 + 1)); for (Group2GroupCache group2GroupCache : group2GroupCaches) { groups.add(group2GroupCache.getChild()); } @@ -399,7 +400,9 @@ public List allMembers(Context c, Group g) throws SQLException { public int countAllMembers(Context context, Group group) throws SQLException { // Get all groups which are a member of this group List group2GroupCaches = group2GroupCacheDAO.findByParent(context, group); - Set groups = new HashSet<>(); + // Initialize HashSet based on List size + current 'group' to avoid Set resizing. + // See https://stackoverflow.com/a/21822273 + Set groups = new HashSet<>((int) ((group2GroupCaches.size() + 1) / 0.75 + 1)); for (Group2GroupCache group2GroupCache : group2GroupCaches) { groups.add(group2GroupCache.getChild()); } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java index 573103f86ad3..9e78e758f92b 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java @@ -39,8 +39,8 @@ public List search(Context context, String query, List q public int searchResultCount(Context context, String query, List queryFields) throws SQLException; /** - * Find all EPersons who are a member of one or more of the listed groups in a paginated fashion. Order is - * indeterminate. + * Find all EPersons who are a member of one or more of the listed groups in a paginated fashion. This returns + * EPersons ordered by UUID. * * @param context current Context * @param groups Set of group(s) to check membership in From 686747bbf82dcf84f49db2471ffba5f938f4b4ac Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Wed, 27 Sep 2023 20:46:39 +0200 Subject: [PATCH 490/686] Allow users with write permission to view hidden metadata (cherry picked from commit 65a17d4390aeab69c191fb75559646aec9dda512) --- .../java/org/dspace/app/rest/converter/ItemConverter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ItemConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ItemConverter.java index 77532249ad41..fc64b66e8a16 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ItemConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ItemConverter.java @@ -67,7 +67,7 @@ public ItemRest convert(Item obj, Projection projection) { * Overrides the parent method to include virtual metadata * @param context The context * @param obj The object of which the filtered metadata will be retrieved - * @return A list of object metadata (including virtual metadata) filtered based on the the hidden metadata + * @return A list of object metadata (including virtual metadata) filtered based on the hidden metadata * configuration */ @Override @@ -79,7 +79,7 @@ public MetadataValueList getPermissionFilteredMetadata(Context context, Item obj Objects.isNull(context.getCurrentUser()) || !authorizeService.isAdmin(context))) { return new MetadataValueList(new ArrayList()); } - if (context != null && authorizeService.isAdmin(context)) { + if (context != null && (authorizeService.isAdmin(context) || itemService.canEdit(context, obj))) { return new MetadataValueList(fullList); } for (MetadataValue mv : fullList) { From 40bb9491a5093e1ea6d3fdd9d7dbb58c9734ec6d Mon Sep 17 00:00:00 2001 From: Toni 
Prieto Date: Fri, 13 Oct 2023 11:15:19 +0200 Subject: [PATCH 491/686] Test modification: allow users with write rights to see hidden metadata (cherry picked from commit df7f6e9f4082e5aef3392932f8a87177ac202655) --- .../java/org/dspace/app/rest/ItemRestRepositoryIT.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index 801976be9f0d..4fee82a0792f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -3021,10 +3021,10 @@ public void testHiddenMetadataForUserWithWriteRights() throws Exception { String token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(get("/api/core/items/" + item.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", ItemMatcher.matchItemProperties(item))) - .andExpect(jsonPath("$.metadata", matchMetadata("dc.title", "Public item 1"))) - .andExpect(jsonPath("$.metadata", matchMetadataDoesNotExist("dc.description.provenance"))); + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ItemMatcher.matchItemProperties(item))) + .andExpect(jsonPath("$.metadata", matchMetadata("dc.title", "Public item 1"))) + .andExpect(jsonPath("$.metadata", matchMetadata("dc.description.provenance", "Provenance data"))); } From 42959d5db8fa9ddb9c3bd49ba7c91dd872c63c17 Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Fri, 13 Oct 2023 21:21:35 +0200 Subject: [PATCH 492/686] Add test to check that user with read rights can see hidden metadata (cherry picked from commit 03496c36d4d47138bcd51badf8daca720d4cc484) --- .../dspace/app/rest/ItemRestRepositoryIT.java | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index 4fee82a0792f..19cb34b297a1 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -14,6 +14,7 @@ import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; import static org.dspace.builder.OrcidHistoryBuilder.createOrcidHistory; import static org.dspace.builder.OrcidQueueBuilder.createOrcidQueue; +import static org.dspace.core.Constants.READ; import static org.dspace.core.Constants.WRITE; import static org.dspace.orcid.OrcidOperation.DELETE; import static org.dspace.profile.OrcidEntitySyncPreference.ALL; @@ -3028,6 +3029,39 @@ public void testHiddenMetadataForUserWithWriteRights() throws Exception { } + @Test + public void testHiddenMetadataForUserWithReadRights() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + + Item item = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withProvenanceData("Provenance data") + .build(); + + context.restoreAuthSystemState(); + + + ResourcePolicyBuilder.createResourcePolicy(context) + .withUser(eperson) + .withAction(READ) + .withDspaceObject(item) + .build(); + + String token = getAuthToken(eperson.getEmail(), 
password); + + getClient(token).perform(get("/api/core/items/" + item.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ItemMatcher.matchItemProperties(item))) + .andExpect(jsonPath("$.metadata", matchMetadata("dc.title", "Public item 1"))) + .andExpect(jsonPath("$.metadata", matchMetadataDoesNotExist("dc.description.provenance"))); + + } + @Test public void testEntityTypePerson() throws Exception { context.turnOffAuthorisationSystem(); From 551c84deded21b84de60200205202d7357b4336f Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 20 Oct 2023 15:08:03 -0500 Subject: [PATCH 493/686] Verify optional message is not missing or a literal "null" value (cherry picked from commit 534ee3a699937eedd11aa5cb54f97b081bcda621) --- .../dspace/app/rest/repository/RequestItemRepository.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index f45dbee66f34..6eb631cfa56e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -242,7 +242,10 @@ public RequestItemRest put(Context context, HttpServletRequest request, } JsonNode responseMessageNode = requestBody.findValue("responseMessage"); - String message = responseMessageNode.asText(); + String message = null; + if (responseMessageNode != null && !responseMessageNode.isNull()) { + message = responseMessageNode.asText(); + } ri.setDecision_date(new Date()); requestItemService.update(context, ri); From e6fa5375125659bc0a0d40d93c8bd5a7ffffde06 Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Wed, 26 Jul 2023 12:31:43 +0200 Subject: [PATCH 494/686] Change the database mode to READ_ONLY during the indexing by discovery consumer (IndexEventConsumer) (cherry picked from commit 94822b50af4098d990d63e27bb3906cfa9c0ec37) --- .../main/java/org/dspace/discovery/IndexEventConsumer.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java index bf1c7da4e150..bafc24c59859 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java @@ -205,6 +205,10 @@ public void consume(Context ctx, Event event) throws Exception { @Override public void end(Context ctx) throws Exception { + // Change the mode to readonly to improve the performance + Context.Mode originalMode = ctx.getCurrentMode(); + ctx.setMode(Context.Mode.READ_ONLY); + try { for (String uid : uniqueIdsToDelete) { try { @@ -235,6 +239,8 @@ public void end(Context ctx) throws Exception { createdItemsToUpdate.clear(); } } + + ctx.setMode(originalMode); } private void indexObject(Context ctx, IndexableObject iu, boolean preDb) throws SQLException { From b194e9c4dd38153231a0a144324a24675751fc67 Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Fri, 28 Jul 2023 09:19:37 +0200 Subject: [PATCH 495/686] Add functions to do a manual flush of the db session and call flush before change to READ_ONLY mode to be sure we index the current object (cherry picked from commit c33d3fa87d6c29533d379939bd23b29ff3d9b5c9) --- .../src/main/java/org/dspace/core/Context.java | 10 ++++++++++ 
.../src/main/java/org/dspace/core/DBConnection.java | 8 ++++++++ .../java/org/dspace/core/HibernateDBConnection.java | 13 +++++++++++++ .../org/dspace/discovery/IndexEventConsumer.java | 10 +++++++--- 4 files changed, 38 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index 82b39dd2dfc7..6382e724301d 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -880,6 +880,16 @@ public void uncacheEntity(E entity) throws SQLExcep dbConnection.uncacheEntity(entity); } + /** + * Flush the current Session to synchronizes the in-memory state of the Session + * with the database (write changes to the database) + * + * @throws SQLException passed through. + */ + public void flushDBChanges() throws SQLException { + dbConnection.flushSession(); + } + public Boolean getCachedAuthorizationResult(DSpaceObject dspaceObject, int action, EPerson eperson) { if (isReadOnly()) { return readOnlyCache.getCachedAuthorizationResult(dspaceObject, action, eperson); diff --git a/dspace-api/src/main/java/org/dspace/core/DBConnection.java b/dspace-api/src/main/java/org/dspace/core/DBConnection.java index cb5825eec1d9..66e4a65dbfe1 100644 --- a/dspace-api/src/main/java/org/dspace/core/DBConnection.java +++ b/dspace-api/src/main/java/org/dspace/core/DBConnection.java @@ -148,4 +148,12 @@ public interface DBConnection { * @throws java.sql.SQLException passed through. */ public void uncacheEntity(E entity) throws SQLException; + + /** + * Do a manual flush. This synchronizes the in-memory state of the Session + * with the database (write changes to the database) + * + * @throws SQLException passed through. + */ + public void flushSession() throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java index 3321e4d837e5..b371af80eede 100644 --- a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java +++ b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java @@ -337,4 +337,17 @@ public void uncacheEntity(E entity) throws SQLExcep } } } + + /** + * Do a manual flush. This synchronizes the in-memory state of the Session + * with the database (write changes to the database) + * + * @throws SQLException passed through. 
+ */ + @Override + public void flushSession() throws SQLException { + if (getSession().isDirty()) { + getSession().flush(); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java index bafc24c59859..cadd5ea056e3 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java @@ -205,7 +205,11 @@ public void consume(Context ctx, Event event) throws Exception { @Override public void end(Context ctx) throws Exception { - // Change the mode to readonly to improve the performance + // Change the mode to readonly to improve performance + // First, we flush the changes to database, if session is dirty, has pending changes + // to synchronize with database, without this flush it could index an old version of + // the object + ctx.flushDBChanges(); Context.Mode originalMode = ctx.getCurrentMode(); ctx.setMode(Context.Mode.READ_ONLY); @@ -238,9 +242,9 @@ public void end(Context ctx) throws Exception { uniqueIdsToDelete.clear(); createdItemsToUpdate.clear(); } - } - ctx.setMode(originalMode); + ctx.setMode(originalMode); + } } private void indexObject(Context ctx, IndexableObject iu, boolean preDb) throws SQLException { From 1b0e826bbaab14c51ef0a39e8589e39a7c4b5bd4 Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Fri, 13 Oct 2023 20:52:08 +0200 Subject: [PATCH 496/686] Flush database changes after switching to READONLY mode (cherry picked from commit 00a65312ccb52481cd72653b4c5465b7d16c760e) --- .../main/java/org/dspace/core/Context.java | 19 +++++++++---------- .../dspace/discovery/IndexEventConsumer.java | 4 ---- 2 files changed, 9 insertions(+), 14 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index 6382e724301d..09b9c4a32dd3 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -810,6 +810,15 @@ public void setMode(Mode newMode) { readOnlyCache.clear(); } + // When going to READ_ONLY, flush database changes to ensure that the current data is retrieved + if (newMode == Mode.READ_ONLY && mode != Mode.READ_ONLY) { + try { + dbConnection.flushSession(); + } catch (SQLException ex) { + log.warn("Unable to flush database changes after switching to READ_ONLY mode", ex); + } + } + //save the new mode mode = newMode; } @@ -880,16 +889,6 @@ public void uncacheEntity(E entity) throws SQLExcep dbConnection.uncacheEntity(entity); } - /** - * Flush the current Session to synchronizes the in-memory state of the Session - * with the database (write changes to the database) - * - * @throws SQLException passed through. 
- */ - public void flushDBChanges() throws SQLException { - dbConnection.flushSession(); - } - public Boolean getCachedAuthorizationResult(DSpaceObject dspaceObject, int action, EPerson eperson) { if (isReadOnly()) { return readOnlyCache.getCachedAuthorizationResult(dspaceObject, action, eperson); diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java index cadd5ea056e3..80602ac80459 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java @@ -206,10 +206,6 @@ public void consume(Context ctx, Event event) throws Exception { public void end(Context ctx) throws Exception { // Change the mode to readonly to improve performance - // First, we flush the changes to database, if session is dirty, has pending changes - // to synchronize with database, without this flush it could index an old version of - // the object - ctx.flushDBChanges(); Context.Mode originalMode = ctx.getCurrentMode(); ctx.setMode(Context.Mode.READ_ONLY); From ddbba2d1257981a0d3ed200492350a5abb6702ec Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Fri, 13 Oct 2023 20:59:33 +0200 Subject: [PATCH 497/686] Add test to check retrieving of policies after changing mode to READ_ONLY (cherry picked from commit d19a9599b5f08a567c93d2e167e219673518fb78) --- .../java/org/dspace/core/ContextModeIT.java | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 dspace-api/src/test/java/org/dspace/core/ContextModeIT.java diff --git a/dspace-api/src/test/java/org/dspace/core/ContextModeIT.java b/dspace-api/src/test/java/org/dspace/core/ContextModeIT.java new file mode 100644 index 000000000000..f689551f1a8c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/core/ContextModeIT.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +import static org.junit.Assert.assertEquals; + +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.builder.CommunityBuilder; +import org.junit.Test; + +public class ContextModeIT extends AbstractIntegrationTestWithDatabase { + + AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + @Test + public void testGetPoliciesNewCommunityAfterReadOnlyModeChange() throws Exception { + + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + context.restoreAuthSystemState(); + + context.setMode(Context.Mode.READ_ONLY); + + List policies = authorizeService.getPoliciesActionFilter(context, parentCommunity, + Constants.READ); + + assertEquals("Should return the default anonymous group read policy", 1, policies.size()); + } + +} From 6c211ac19c078c694507fcfe03ac70e444a8303c Mon Sep 17 00:00:00 2001 From: Toni Prieto Date: Fri, 27 Oct 2023 09:11:12 +0200 Subject: [PATCH 498/686] Change class name to ContextIT and correct a test (cherry picked from commit a5567992bbe456cd33c68f695a2364f507149e7a) --- .../org/dspace/core/{ContextModeIT.java => 
ContextIT.java} | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) rename dspace-api/src/test/java/org/dspace/core/{ContextModeIT.java => ContextIT.java} (83%) diff --git a/dspace-api/src/test/java/org/dspace/core/ContextModeIT.java b/dspace-api/src/test/java/org/dspace/core/ContextIT.java similarity index 83% rename from dspace-api/src/test/java/org/dspace/core/ContextModeIT.java rename to dspace-api/src/test/java/org/dspace/core/ContextIT.java index f689551f1a8c..6cf8336171f2 100644 --- a/dspace-api/src/test/java/org/dspace/core/ContextModeIT.java +++ b/dspace-api/src/test/java/org/dspace/core/ContextIT.java @@ -18,7 +18,7 @@ import org.dspace.builder.CommunityBuilder; import org.junit.Test; -public class ContextModeIT extends AbstractIntegrationTestWithDatabase { +public class ContextIT extends AbstractIntegrationTestWithDatabase { AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); @@ -26,6 +26,11 @@ public class ContextModeIT extends AbstractIntegrationTestWithDatabase { public void testGetPoliciesNewCommunityAfterReadOnlyModeChange() throws Exception { context.turnOffAuthorisationSystem(); + + // First disable the index consumer. The indexing process calls the authorizeService + // function used in this test and may affect the test + context.setDispatcher("noindex"); + parentCommunity = CommunityBuilder.createCommunity(context) .withName("Parent Community") .build(); From 9f7e3ee8109810ba0f298f96d122ab11d69de52e Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Fri, 3 Nov 2023 17:49:25 +0100 Subject: [PATCH 499/686] [DSC-1319] Improved RorOrgUnitAuthority --- .../authority/RorOrgUnitAuthority.java | 50 ++++++++++++++++++- .../external/datamodel/ImportRecord.java | 7 +++ .../RorImportMetadataSourceServiceImpl.java | 29 +++++++++-- 3 files changed, 79 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java index c733b95fc089..de2271901819 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/RorOrgUnitAuthority.java @@ -9,14 +9,16 @@ package org.dspace.content.authority; import java.util.Collection; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.dspace.content.authority.factory.ItemAuthorityServiceFactory; import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.ror.service.RorImportMetadataSourceServiceImpl; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -81,7 +83,51 @@ private String getName(ImportRecord orgUnit) { } private Map buildExtras(ImportRecord orgUnit) { - return new HashMap<>(); + + Map extras = new LinkedHashMap(); + + addExtra(extras, getIdentifier(orgUnit), "id"); + + orgUnit.getSingleValue("dc", "type", null) + .ifPresent(type -> addExtra(extras, type, "type")); + + String acronym = orgUnit.getValue("oairecerif", "acronym", null).stream() + .map(MetadatumDTO::getValue) + .collect(Collectors.joining(", ")); + + if (StringUtils.isNotBlank(acronym)) { + addExtra(extras, acronym, "acronym"); + 
} + + return extras; + } + + private void addExtra(Map extras, String value, String extraType) { + + String key = getKey(extraType); + + if (useAsData(extraType)) { + extras.put("data-" + key, value); + } + if (useForDisplaying(extraType)) { + extras.put(key, value); + } + + } + + private boolean useForDisplaying(String extraType) { + return configurationService.getBooleanProperty("cris.OrcidAuthority." + + getPluginInstanceName() + "." + extraType + ".display", true); + } + + private boolean useAsData(String extraType) { + return configurationService.getBooleanProperty("cris.OrcidAuthority." + + getPluginInstanceName() + "." + extraType + ".as-data", true); + } + + private String getKey(String extraType) { + return configurationService.getProperty("cris.OrcidAuthority." + + getPluginInstanceName() + "." + extraType + ".key", "ror_orgunit_" + extraType); } private String composeAuthorityValue(String rorId) { diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java index e2cb24f4b578..06e05a30b302 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java @@ -11,6 +11,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Optional; import org.dspace.importer.external.metadatamapping.MetadatumDTO; @@ -94,6 +95,12 @@ public Collection getValue(String schema, String element, String q return values; } + public Optional getSingleValue(String schema, String element, String qualifier) { + return getValue(schema, element, qualifier).stream() + .map(MetadatumDTO::getValue) + .findFirst(); + } + /** * Add a value to the valueList * diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java index ebc7caefb26d..650e939e3dd0 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/ror/service/RorImportMetadataSourceServiceImpl.java @@ -15,6 +15,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.Callable; +import java.util.stream.Collectors; import javax.el.MethodNotFoundException; import com.fasterxml.jackson.core.JsonProcessingException; @@ -69,7 +70,7 @@ public int getRecordsCount(Query query) throws MetadataSourceException { @Override public Collection getRecords(String query, int start, int count) throws MetadataSourceException { - return retry(new SearchByQueryCallable(query)); + return retry(new SearchByQueryCallable(query, start)); } @Override @@ -110,9 +111,10 @@ private class SearchByQueryCallable implements Callable> { private Query query; - private SearchByQueryCallable(String queryString) { + private SearchByQueryCallable(String queryString, int start) { query = new Query(); query.addParameter("query", queryString); + query.addParameter("start", start); } private SearchByQueryCallable(Query query) { @@ -121,7 +123,8 @@ private SearchByQueryCallable(Query query) { @Override public List call() throws Exception { - return search(query.getParameterAsClass("query", String.class)); + return search(query.getParameterAsClass("query", String.class), + query.getParameterAsClass("start", 
Integer.class)); } } @@ -220,13 +223,16 @@ private List searchById(String id) { return adsResults; } - private List search(String query) { + private List search(String query, Integer start) { List adsResults = new ArrayList<>(); try { Map> params = new HashMap>(); URIBuilder uriBuilder = new URIBuilder(this.url); uriBuilder.addParameter("query", query); + if (start != null) { + uriBuilder.addParameter("page", String.valueOf((start / 20) + 1)); + } String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); if (StringUtils.isEmpty(resp)) { @@ -247,7 +253,20 @@ private List search(String query) { } catch (URISyntaxException e) { e.printStackTrace(); } - return adsResults; + + if (start == null) { + return adsResults; + } + + if (start % 20 == 0) { + return adsResults.stream() + .limit(10) + .collect(Collectors.toList()); + } else { + return adsResults.stream() + .skip(10) + .collect(Collectors.toList()); + } } private JsonNode convertStringJsonToJsonNode(String json) { From 38c81b9f0d89ec6f181ba0a636e1d5fb13502ad8 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Mon, 30 Oct 2023 09:05:36 +0300 Subject: [PATCH 500/686] dspace/config: update spider agent list Update list of spider user agents from the COUNTER-Robots project. See: https://github.com/atmire/COUNTER-Robots (cherry picked from commit 7566a79d906b5050bef01d22c5f4b3e4ab6e4b58) --- dspace/config/spiders/agents/example | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/dspace/config/spiders/agents/example b/dspace/config/spiders/agents/example index f206558d81f6..998431d92a19 100644 --- a/dspace/config/spiders/agents/example +++ b/dspace/config/spiders/agents/example @@ -27,6 +27,7 @@ arks ^Array$ asterias atomz +axios\/\d BDFetch Betsie baidu @@ -45,6 +46,7 @@ BUbiNG bwh3_user_agent CakePHP celestial +centuryb cfnetwork checklink checkprivacy @@ -89,6 +91,7 @@ Embedly EThOS\+\(British\+Library\) facebookexternalhit\/ favorg +Faveeo\/\d FDM(\s|\+)\d Feedbin feedburner @@ -113,6 +116,7 @@ GLMSLinkAnalysis Goldfire(\s|\+)Server google Grammarly +GroupHigh\/\d grub gulliver gvfs\/ @@ -121,16 +125,19 @@ heritrix holmes htdig htmlparser +HeadlessChrome HttpComponents\/1.1 HTTPFetcher http.?client httpget +httpx httrack ia_archiver ichiro iktomi ilse Indy Library +insomnia ^integrity\/\d internetseer intute @@ -140,6 +147,7 @@ iskanie jeeves Jersey\/\d jobo +Koha kyluka larbin libcurl @@ -161,10 +169,12 @@ LongURL.API ltx71 lwp lycos[_+] +MaCoCu mail\.ru MarcEdit mediapartners-google megite +MetaInspector MetaURI[\+\s]API\/\d\.\d Microsoft(\s|\+)URL(\s|\+)Control Microsoft Office Existence Discovery @@ -190,6 +200,7 @@ nagios ^NetAnts\/\d netcraft netluchs +nettle newspaper\/\d ng\/2\. 
^Ning\/\d @@ -225,6 +236,7 @@ rambler ReactorNetty\/\d Readpaper redalert +RestSharp Riddler robozilla rss @@ -252,7 +264,7 @@ T\-H\-U\-N\-D\-E\-R\-S\-T\-O\-N\-E tailrank Teleport(\s|\+)Pro Teoma -The\+Knowledge\+AI +The[\+\s]Knowledge[\+\s]AI titan ^Traackr\.com$ Trello @@ -302,6 +314,8 @@ yacy yahoo yandex Yeti\/\d +Zabbix +ZoteroTranslationServer zeus zyborg 7siters From 93977108ccf8d9665091321f5e314f07f7383381 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 6 Nov 2023 11:33:00 +0100 Subject: [PATCH 501/686] [DSC-1229] Fixes ItemServiceTest --- .../org/dspace/discovery/SearchUtils.java | 2 +- .../DiscoveryConfigurationService.java | 99 +++++++++++++++++-- .../dspace/app/rest/OpenSearchController.java | 2 +- dspace/solr/search/conf/schema.xml | 2 +- 4 files changed, 93 insertions(+), 12 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 32b69457de47..a40b32b5fb8c 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -174,7 +174,7 @@ public static DiscoveryConfiguration getDiscoveryConfigurationByName( public static DiscoveryConfiguration getDiscoveryConfigurationByDSO( Context context, DSpaceObject dso) { DiscoveryConfigurationService configurationService = getConfigurationService(); - return configurationService.getDiscoveryConfiguration((IndexableObject) dso); + return configurationService.getDiscoveryDSOConfiguration(context, dso); } public static DiscoveryConfigurationService getConfigurationService() { diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 557b7a5a59fb..80d0eb6106db 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -7,14 +7,22 @@ */ package org.dspace.discovery.configuration; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; @@ -30,6 +38,13 @@ public class DiscoveryConfigurationService { private Map map; private Map> toIgnoreMetadataFields = new HashMap<>(); + /** + * Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its + * own configuration, we take the one of the first parent that does. + * This cache ensures we do not have to go up the hierarchy every time. 
+ */ + private final Map comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>(); + public Map getMap() { return map; } @@ -68,24 +83,86 @@ public void setToIgnoreMetadataFields(Map> toIgnoreMetadat * @param indexableObject - The IndexableObject to retrieve the configuration for * @return the discovery configuration for the provided IndexableObject. */ - public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject indexableObject) { + public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) { String name; if (indexableObject == null) { - name = "default"; + return getDiscoveryConfiguration(null); } else if (indexableObject instanceof IndexableDSpaceObject) { - name = ((IndexableDSpaceObject) indexableObject).getIndexedObject().getHandle(); + return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject()); } else { name = indexableObject.getUniqueIndexID(); } + return getDiscoveryConfiguration(name); + } - return getDiscoveryConfigurationByNameOrDefault(name); + /** + * Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO. + */ + public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { + // Fall back to default configuration + if (dso == null) { + return getDiscoveryConfiguration(null, true); + } + + // Attempt to retrieve cached configuration by UUID + if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) { + return comColToDiscoveryConfigurationMap.get(dso.getID()); + } + + DiscoveryConfiguration configuration; + + // Attempt to retrieve configuration by DSO handle + configuration = getDiscoveryConfiguration(dso.getHandle(), false); + + if (configuration == null) { + // Recurse up the Comm/Coll hierarchy until a configuration is found + DSpaceObjectService dSpaceObjectService = + ContentServiceFactory.getInstance().getDSpaceObjectService(dso); + DSpaceObject parentObject = null; + try { + parentObject = dSpaceObjectService.getParentObject(context, dso); + } catch (SQLException e) { + log.error(e); + } + configuration = getDiscoveryDSOConfiguration(context, parentObject); + } + + // Cache the resulting configuration when the DSO is a Community or Collection + if (dso instanceof Community || dso instanceof Collection) { + comColToDiscoveryConfigurationMap.put(dso.getID(), configuration); + } + + return configuration; } - public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDefault(final String name) { + /** + * Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the + * default configuration will be returned. + * @param name - The name of the configuration to be retrieved + * @return the Discovery Configuration for the provided name, or default when none was found. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(String name) { + return getDiscoveryConfiguration(name, true); + } + + /** + * Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration + * will be returned when no match is found. 
When useDefault is set to false, null will be returned when no match is + * found. + * @param name - The name of the configuration to retrieve + * @param useDefault - Whether the default configuration should be used when no match is found + * @return the configuration for the provided name + */ + public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) { + DiscoveryConfiguration result; - DiscoveryConfiguration result = getDiscoveryConfigurationByName(name); + result = StringUtils.isBlank(name) ? null : getMap().get(name); - if (result == null) { + if (result == null && useDefault) { //No specific configuration, get the default one result = getMap().get("default"); } @@ -93,8 +170,12 @@ public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDefault(final Str return result; } + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDefault(final String name) { + return this.getDiscoveryConfiguration(name, true); + } + public DiscoveryConfiguration getDiscoveryConfigurationByName(String name) { - return StringUtils.isBlank(name) ? null : getMap().get(name); + return this.getDiscoveryConfiguration(name, false); } public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, @@ -102,7 +183,7 @@ public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); } else { - return getDiscoveryConfiguration(dso); + return getDiscoveryConfiguration(null, dso); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java index aafbe05b67a1..0e161b128449 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java @@ -176,7 +176,7 @@ public void search(HttpServletRequest request, if (dsoObject != null) { container = scopeResolver.resolveScope(context, dsoObject); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfiguration(container); + .getDiscoveryConfiguration(context, container); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() .toArray( diff --git a/dspace/solr/search/conf/schema.xml b/dspace/solr/search/conf/schema.xml index b9c78f1fd847..1a987843d79c 100644 --- a/dspace/solr/search/conf/schema.xml +++ b/dspace/solr/search/conf/schema.xml @@ -299,7 +299,7 @@ - + From c45e7d8dabc0133e0bb63a2efdfe4b4e737a9bea Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Mon, 6 Nov 2023 13:20:35 +0100 Subject: [PATCH 502/686] [DSC-1319] Added tests --- .../external/datamodel/ImportRecord.java | 6 + .../RorImportMetadataSourceServiceIT.java | 137 + .../app/rest/RorOrgUnitAuthorityIT.java | 47 + .../org/dspace/app/rest/ror-record.json | 107 + .../org/dspace/app/rest/ror-records.json | 2383 +++++++++++++++++ 5 files changed, 2680 insertions(+) create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/RorImportMetadataSourceServiceIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/RorOrgUnitAuthorityIT.java create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-record.json create mode 100644 
dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-records.json diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java index 06e05a30b302..b686169f9a21 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java @@ -13,6 +13,7 @@ import java.util.List; import java.util.Optional; +import org.dspace.content.MetadataFieldName; import org.dspace.importer.external.metadatamapping.MetadatumDTO; /** @@ -95,6 +96,11 @@ public Collection getValue(String schema, String element, String q return values; } + public Optional getSingleValue(String field) { + MetadataFieldName metadataFieldName = new MetadataFieldName(field); + return getSingleValue(metadataFieldName.schema, metadataFieldName.element, metadataFieldName.qualifier); + } + public Optional getSingleValue(String schema, String element, String qualifier) { return getValue(schema, element, qualifier).stream() .map(MetadatumDTO::getValue) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorImportMetadataSourceServiceIT.java new file mode 100644 index 000000000000..4f8e56f98054 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorImportMetadataSourceServiceIT.java @@ -0,0 +1,137 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.matcher.LambdaMatcher.matches; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.Collection; +import java.util.Optional; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.ror.service.RorImportMetadataSourceServiceImpl; +import org.hamcrest.Matcher; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +public class RorImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private LiveImportClientImpl liveImportClient; + + @Autowired + private RorImportMetadataSourceServiceImpl rorServiceImpl; + + @Test + public void tesGetRecords() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = getClass().getResourceAsStream("ror-records.json")) { + + String jsonResponse = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(jsonResponse, 200, "OK"); + 
when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + Collection recordsImported = rorServiceImpl.getRecords("test query", 0, 2); + assertThat(recordsImported, hasSize(10)); + + ImportRecord record = recordsImported.iterator().next(); + + assertThat(record.getValueList(), hasSize(11)); + + assertThat(record.getSingleValue("dc.title"), is("The University of Texas")); + assertThat(record.getSingleValue("organization.identifier.ror"), is("https://ror.org/02f6dcw23")); + assertThat(record.getSingleValue("oairecerif.acronym"), is("UTHSCSA")); + assertThat(record.getSingleValue("oairecerif.identifier.url"), is("http://www.uthscsa.edu/")); + assertThat(record.getSingleValue("dc.type"), is("Education")); + assertThat(record.getSingleValue("organization.address.addressCountry"), is("US")); + assertThat(record.getSingleValue("organization.foundingDate"), is("1959")); + assertThat(record.getValue("organization", "identifier", "crossrefid"), hasSize(2)); + assertThat(record.getSingleValue("organization.identifier.isni"), is("0000 0001 0629 5880")); + assertThat(record.getSingleValue("organization.parentOrganization"), is("The University of Texas System")); + + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + @Test + public void tesCount() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = getClass().getResourceAsStream("ror-records.json")) { + + String jsonResponse = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(jsonResponse, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + Integer count = rorServiceImpl.count("test"); + assertThat(count, equalTo(200)); + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + @Test + public void tesGetRecord() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = getClass().getResourceAsStream("ror-record.json")) { + + String jsonResponse = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(jsonResponse, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + ImportRecord record = rorServiceImpl.getRecord("https://ror.org/01sps7q28"); + assertThat(record.getValueList(), hasSize(9)); + assertThat(record.getSingleValue("dc.title"), is("The University of Texas Health Science Center at Tyler")); + assertThat(record.getSingleValue("organization.identifier.ror"), is("https://ror.org/01sps7q28")); + assertThat(record.getSingleValue("oairecerif.acronym"), is("UTHSCT")); + assertThat(record.getSingleValue("oairecerif.identifier.url"), + is("https://www.utsystem.edu/institutions/university-texas-health-science-center-tyler")); + assertThat(record.getSingleValue("dc.type"), is("Healthcare")); + assertThat(record.getSingleValue("organization.address.addressCountry"), is("US")); + assertThat(record.getSingleValue("organization.foundingDate"), is("1947")); + 
assertThat(record.getSingleValue("organization.identifier.isni"), is("0000 0000 9704 5790")); + assertThat(record.getSingleValue("organization.parentOrganization"), is("The University of Texas System")); + + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + private Matcher> is(String value) { + return matches(optionalValue -> optionalValue.isPresent() && optionalValue.get().equals(value)); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorOrgUnitAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorOrgUnitAuthorityIT.java new file mode 100644 index 000000000000..e9a42e78ec38 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RorOrgUnitAuthorityIT.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.matcher.ItemAuthorityMatcher.matchItemAuthorityWithOtherInformations; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.util.HashMap; +import java.util.Map; + +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.junit.Test; + +public class RorOrgUnitAuthorityIT extends AbstractControllerIntegrationTest { + + @Test + public void testAuthority() throws Exception { + + Map expectedExtras = new HashMap<>(); + expectedExtras.put("data-ror_orgunit_id", "https://ror.org/02z02cv32"); + expectedExtras.put("ror_orgunit_id", "https://ror.org/02z02cv32"); + expectedExtras.put("data-ror_orgunit_type", "Nonprofit"); + expectedExtras.put("ror_orgunit_type", "Nonprofit"); + expectedExtras.put("data-ror_orgunit_acronym", "WEICan, IEEC"); + expectedExtras.put("ror_orgunit_acronym", "WEICan, IEEC"); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/OrgUnitAuthority/entries") + .param("filter", "test")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", hasSize(10))) + .andExpect(jsonPath("$._embedded.entries", + hasItem(matchItemAuthorityWithOtherInformations("will be referenced::ROR-ID::https://ror.org/02z02cv32", + "Wind Energy Institute of Canada", "Wind Energy Institute of Canada", "vocabularyEntry", + expectedExtras)))); + } + +} diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-record.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-record.json new file mode 100644 index 000000000000..51924485b347 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-record.json @@ -0,0 +1,107 @@ +{ + "id": "https://ror.org/01sps7q28", + "name": "The University of Texas Health Science Center at Tyler", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1947, + "types": [ + "Healthcare" + ], + "relationships": [ + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 32.426014, + "lng": -95.212728, + "state": "Texas", + "state_code": "US-TX", + 
"city": "Tyler", + "geonames_city": { + "id": 4738214, + "city": "Tyler", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + "name": "Smith County", + "id": 4729130, + "ascii_name": "Smith County", + "code": "US.TX.423" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "https://www.utsystem.edu/institutions/university-texas-health-science-center-tyler" + ], + "aliases": [ + "East Texas Tuberculosis Sanitarium", + "UT Health Northeast" + ], + "acronyms": [ + "UTHSCT" + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Texas_Health_Science_Center_at_Tyler", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9704 5790" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "3446655" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q7896437" + ] + }, + "GRID": { + "preferred": "grid.267310.1", + "all": "grid.267310.1" + } + } +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-records.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-records.json new file mode 100644 index 000000000000..91ce8d33e084 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ror-records.json @@ -0,0 +1,2383 @@ +{ + "number_of_results": 200, + "time_taken": 12, + "items": [ + { + "id": "https://ror.org/02f6dcw23", + "name": "The University of Texas", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1959, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "Audie L. 
Murphy Memorial VA Hospital", + "type": "Related", + "id": "https://ror.org/035xhk118" + }, + { + "label": "San Antonio Military Medical Center", + "type": "Related", + "id": "https://ror.org/00m1mwc36" + }, + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 29.508129, + "lng": -98.574025, + "state": "Texas", + "state_code": "US-TX", + "city": "San Antonio", + "geonames_city": { + "id": 4726206, + "city": "San Antonio", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + "name": "Bexar County", + "id": 4674023, + "ascii_name": "Bexar County", + "code": "US.TX.029" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uthscsa.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UTHSCSA" + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Texas_Health_Science_Center_at_San_Antonio", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 0629 5880" + ] + }, + "FundRef": { + "preferred": "100008635", + "all": [ + "100008635", + "100008636" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "1593427" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q4005868" + ] + }, + "GRID": { + "preferred": "grid.267309.9", + "all": "grid.267309.9" + } + } + }, + { + "id": "https://ror.org/01sps7q28", + "name": "The University of Texas Health Science Center at Tyler", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1947, + "types": [ + "Healthcare" + ], + "relationships": [ + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 32.426014, + "lng": -95.212728, + "state": "Texas", + "state_code": "US-TX", + "city": "Tyler", + "geonames_city": { + "id": 4738214, + "city": "Tyler", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + "name": "Smith County", + "id": 4729130, + "ascii_name": "Smith County", + "code": "US.TX.423" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "https://www.utsystem.edu/institutions/university-texas-health-science-center-tyler" + ], + "aliases": [ + "East Texas Tuberculosis Sanitarium", + "UT Health Northeast" + ], + "acronyms": [ + "UTHSCT" + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Texas_Health_Science_Center_at_Tyler", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + 
"ISNI": { + "preferred": null, + "all": [ + "0000 0000 9704 5790" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "3446655" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q7896437" + ] + }, + "GRID": { + "preferred": "grid.267310.1", + "all": "grid.267310.1" + } + } + }, + { + "id": "https://ror.org/05byvp690", + "name": "The University of Texas Southwestern Medical Center", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1943, + "types": [ + "Healthcare" + ], + "relationships": [ + { + "label": "Children's Medical Center", + "type": "Related", + "id": "https://ror.org/02ndk3y82" + }, + { + "label": "Parkland Memorial Hospital", + "type": "Related", + "id": "https://ror.org/0208r0146" + }, + { + "label": "VA North Texas Health Care System", + "type": "Related", + "id": "https://ror.org/01nzxq896" + }, + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + }, + { + "label": "Institute for Exercise and Environmental Medicine", + "type": "Child", + "id": "https://ror.org/03gqc7y13" + }, + { + "label": "Texas Health Dallas", + "type": "Child", + "id": "https://ror.org/05k07p323" + } + ], + "addresses": [ + { + "lat": 32.812185, + "lng": -96.840174, + "state": "Texas", + "state_code": "US-TX", + "city": "Dallas", + "geonames_city": { + "id": 4684888, + "city": "Dallas", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + "name": "Dallas County", + "id": 4684904, + "ascii_name": "Dallas County", + "code": "US.TX.113" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.utsouthwestern.edu/" + ], + "aliases": [ + "UT Southwestern" + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Texas_Southwestern_Medical_Center", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9482 7121" + ] + }, + "FundRef": { + "preferred": "100007914", + "all": [ + "100007914", + "100010487", + "100008260" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "617906" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2725999" + ] + }, + "GRID": { + "preferred": "grid.267313.2", + "all": "grid.267313.2" + } + } + }, + { + "id": "https://ror.org/019kgqr73", + "name": "The University of Texas at Arlington", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1895, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "VA North Texas Health Care System", + "type": "Related", + "id": "https://ror.org/01nzxq896" + }, + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 32.731, + "lng": -97.115, + "state": "Texas", + "state_code": "US-TX", + "city": "Arlington", + "geonames_city": { + "id": 4671240, + "city": "Arlington", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + 
"name": "Tarrant County", + "id": 4735638, + "ascii_name": "Tarrant County", + "code": "US.TX.439" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uta.edu/uta/" + ], + "aliases": [ + "UT Arlington" + ], + "acronyms": [ + "UTA" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Texas_at_Arlington", + "labels": [ + { + "label": "Université du Texas à Arlington", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2181 9515" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100009497" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "906409" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q1230739" + ] + }, + "GRID": { + "preferred": "grid.267315.4", + "all": "grid.267315.4" + } + } + }, + { + "id": "https://ror.org/051smbs96", + "name": "The University of Texas of the Permian Basin", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1973, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "The University of Texas System", + "type": "Parent", + "id": "https://ror.org/01gek1696" + } + ], + "addresses": [ + { + "lat": 31.889444, + "lng": -102.329531, + "state": "Texas", + "state_code": "US-TX", + "city": "Odessa", + "geonames_city": { + "id": 5527554, + "city": "Odessa", + "geonames_admin1": { + "name": "Texas", + "id": 4736286, + "ascii_name": "Texas", + "code": "US.TX" + }, + "geonames_admin2": { + "name": "Ector County", + "id": 5520910, + "ascii_name": "Ector County", + "code": "US.TX.135" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.utpb.edu/" + ], + "aliases": [ + "UT Permian Basin" + ], + "acronyms": [ + "UTPB" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Texas_of_the_Permian_Basin", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9140 1491" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "1419441" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2495935" + ] + }, + "GRID": { + "preferred": "grid.267328.a", + "all": "grid.267328.a" + } + } + }, + { + "id": "https://ror.org/044vy1d05", + "name": "Tokushima University", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1949, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "Tokushima University Hospital", + "type": "Related", + "id": "https://ror.org/021ph5e41" + } + ], + "addresses": [ + { + "lat": 34.07, + "lng": 134.56, + "state": null, + "state_code": null, + "city": "Tokushima", + 
"geonames_city": { + "id": 1850158, + "city": "Tokushima", + "geonames_admin1": { + "name": "Tokushima", + "id": 1850157, + "ascii_name": "Tokushima", + "code": "JP.39" + }, + "geonames_admin2": { + "name": "Tokushima Shi", + "id": 1850156, + "ascii_name": "Tokushima Shi", + "code": "JP.39.1850156" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 1861060 + } + ], + "links": [ + "https://www.tokushima-u.ac.jp/" + ], + "aliases": [ + "Tokushima Daigaku", + "University of Tokushima" + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Tokushima", + "labels": [ + { + "label": "徳島大学", + "iso639": "ja" + } + ], + "country": { + "country_name": "Japan", + "country_code": "JP" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 1092 3579" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "501100005623" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "15696836" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q1150231" + ] + }, + "GRID": { + "preferred": "grid.267335.6", + "all": "grid.267335.6" + } + } + }, + { + "id": "https://ror.org/03np13864", + "name": "University of Trinidad and Tobago", + "email_address": null, + "ip_addresses": [ + + ], + "established": 2004, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 10.616667, + "lng": -61.216667, + "state": null, + "state_code": null, + "city": "Arima", + "geonames_city": { + "id": 3575051, + "city": "Arima", + "geonames_admin1": { + "name": "Borough of Arima", + "id": 3575052, + "ascii_name": "Borough of Arima", + "code": "TT.01" + }, + "geonames_admin2": { + "name": null, + "id": null, + "ascii_name": null, + "code": null + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 3573591 + } + ], + "links": [ + "https://utt.edu.tt/" + ], + "aliases": [ + + ], + "acronyms": [ + "UTT" + ], + "status": "active", + "wikipedia_url": "https://en.wikipedia.org/wiki/University_of_Trinidad_and_Tobago", + "labels": [ + { + "label": "Universidad de Trinidad y Tobago", + "iso639": "es" + } + ], + "country": { + "country_name": "Trinidad and Tobago", + "country_code": "TT" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9490 0886" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "8706288" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q648244" + ] + }, + "GRID": { + "preferred": "grid.267355.0", + "all": "grid.267355.0" + } + } + }, + { + "id": "https://ror.org/04wn28048", + "name": "University of Tulsa", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1894, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 36.152222, + "lng": -95.946389, + "state": "Oklahoma", + "state_code": "US-OK", + "city": "Tulsa", 
+ "geonames_city": { + "id": 4553433, + "city": "Tulsa", + "geonames_admin1": { + "name": "Oklahoma", + "id": 4544379, + "ascii_name": "Oklahoma", + "code": "US.OK" + }, + "geonames_admin2": { + "name": "Tulsa County", + "id": 4553440, + "ascii_name": "Tulsa County", + "code": "US.OK.143" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://utulsa.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "TU" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Tulsa", + "labels": [ + { + "label": "Université de tulsa", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2160 264X" + ] + }, + "FundRef": { + "preferred": "100007147", + "all": [ + "100007147", + "100006455" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "32043" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q1848657" + ] + }, + "GRID": { + "preferred": "grid.267360.6", + "all": "grid.267360.6" + } + } + }, + { + "id": "https://ror.org/04scfb908", + "name": "Alfred Health", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1871, + "types": [ + "Healthcare" + ], + "relationships": [ + { + "label": "Caulfield Hospital", + "type": "Child", + "id": "https://ror.org/01fcxf261" + }, + { + "label": "Melbourne Sexual Health Centre", + "type": "Child", + "id": "https://ror.org/013fdz725" + }, + { + "label": "National Trauma Research Institute", + "type": "Child", + "id": "https://ror.org/048t93218" + }, + { + "label": "The Alfred Hospital", + "type": "Child", + "id": "https://ror.org/01wddqe20" + } + ], + "addresses": [ + { + "lat": -37.845542, + "lng": 144.981632, + "state": "Victoria", + "state_code": "AU-VIC", + "city": "Melbourne", + "geonames_city": { + "id": 2158177, + "city": "Melbourne", + "geonames_admin1": { + "name": "Victoria", + "id": 2145234, + "ascii_name": "Victoria", + "code": "AU.07" + }, + "geonames_admin2": { + "name": "Melbourne", + "id": 7839805, + "ascii_name": "Melbourne", + "code": "AU.07.24600" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 2077456 + } + ], + "links": [ + "http://www.alfred.org.au/" + ], + "aliases": [ + + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "", + "labels": [ + + ], + "country": { + "country_name": "Australia", + "country_code": "AU" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0004 0432 5259" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "501100002716" + ] + }, + "GRID": { + "preferred": "grid.267362.4", + "all": "grid.267362.4" + } + } + }, + { + "id": "https://ror.org/02c2f8975", + "name": "University of Ulsan", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1970, + 
"types": [ + "Education" + ], + "relationships": [ + { + "label": "Ulsan University Hospital", + "type": "Related", + "id": "https://ror.org/03sab2a45" + } + ], + "addresses": [ + { + "lat": 35.542772, + "lng": 129.256725, + "state": null, + "state_code": null, + "city": "Ulsan", + "geonames_city": { + "id": 1833747, + "city": "Ulsan", + "geonames_admin1": { + "name": "Ulsan", + "id": 1833742, + "ascii_name": "Ulsan", + "code": "KR.21" + }, + "geonames_admin2": { + "name": null, + "id": null, + "ascii_name": null, + "code": null + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 1835841 + } + ], + "links": [ + "http://en.ulsan.ac.kr/contents/main/" + ], + "aliases": [ + + ], + "acronyms": [ + "UOU" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Ulsan", + "labels": [ + { + "label": "울산대학교", + "iso639": "ko" + } + ], + "country": { + "country_name": "South Korea", + "country_code": "KR" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0004 0533 4667" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "501100002568" + ] + }, + "OrgRef": { + "preferred": "10458246", + "all": [ + "10458246", + "15162872" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q491717" + ] + }, + "GRID": { + "preferred": "grid.267370.7", + "all": "grid.267370.7" + } + } + }, + { + "id": "https://ror.org/010acrp16", + "name": "University of West Alabama", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1835, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 32.59, + "lng": -88.186, + "state": "Alabama", + "state_code": "US-AL", + "city": "Livingston", + "geonames_city": { + "id": 4073383, + "city": "Livingston", + "geonames_admin1": { + "name": "Alabama", + "id": 4829764, + "ascii_name": "Alabama", + "code": "US.AL" + }, + "geonames_admin2": { + "name": "Sumter County", + "id": 4092386, + "ascii_name": "Sumter County", + "code": "US.AL.119" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uwa.edu/" + ], + "aliases": [ + "Livingston Female Academy" + ], + "acronyms": [ + "UWA" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_West_Alabama", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9963 9197" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "2425212" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q637346" + ] + }, + "GRID": { + "preferred": "grid.267434.0", + "all": "grid.267434.0" + } + } + }, + { + "id": "https://ror.org/002w4zy91", + "name": "University of West Florida", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1963, 
+ "types": [ + "Education" + ], + "relationships": [ + { + "label": "State University System of Florida", + "type": "Parent", + "id": "https://ror.org/05sqd3t97" + } + ], + "addresses": [ + { + "lat": 30.549493, + "lng": -87.21812, + "state": "Florida", + "state_code": "US-FL", + "city": "Pensacola", + "geonames_city": { + "id": 4168228, + "city": "Pensacola", + "geonames_admin1": { + "name": "Florida", + "id": 4155751, + "ascii_name": "Florida", + "code": "US.FL" + }, + "geonames_admin2": { + "name": "Escambia County", + "id": 4154550, + "ascii_name": "Escambia County", + "code": "US.FL.033" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://uwf.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UWF" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_West_Florida", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2112 2427" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100009842" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "750756" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q659255" + ] + }, + "GRID": { + "preferred": "grid.267436.2", + "all": "grid.267436.2" + } + } + }, + { + "id": "https://ror.org/01cqxk816", + "name": "University of West Georgia", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1906, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "University System of Georgia", + "type": "Parent", + "id": "https://ror.org/017wcm924" + } + ], + "addresses": [ + { + "lat": 33.573357, + "lng": -85.099593, + "state": "Georgia", + "state_code": "US-GA", + "city": "Carrollton", + "geonames_city": { + "id": 4186416, + "city": "Carrollton", + "geonames_admin1": { + "name": "Georgia", + "id": 4197000, + "ascii_name": "Georgia", + "code": "US.GA" + }, + "geonames_admin2": { + "name": "Carroll County", + "id": 4186396, + "ascii_name": "Carroll County", + "code": "US.GA.045" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.westga.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UWG" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_West_Georgia", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2223 6696" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100007922" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "595315" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2495945" + ] + }, + "GRID": { + "preferred": "grid.267437.3", + "all": "grid.267437.3" + } + } + }, + { + "id": 
"https://ror.org/03c8vvr84", + "name": "University of Western States", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1904, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 45.543351, + "lng": -122.523973, + "state": "Oregon", + "state_code": "US-OR", + "city": "Portland", + "geonames_city": { + "id": 5746545, + "city": "Portland", + "geonames_admin1": { + "name": "Oregon", + "id": 5744337, + "ascii_name": "Oregon", + "code": "US.OR" + }, + "geonames_admin2": { + "name": "Multnomah County", + "id": 5742126, + "ascii_name": "Multnomah County", + "code": "US.OR.051" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uws.edu/" + ], + "aliases": [ + "Western States Chiropractic College" + ], + "acronyms": [ + "UWS" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Western_States", + "labels": [ + + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0004 0455 9493" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "1655050" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q7896612" + ] + }, + "GRID": { + "preferred": "grid.267451.3", + "all": "grid.267451.3" + } + } + }, + { + "id": "https://ror.org/03fmjzx88", + "name": "University of Winchester", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1840, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 51.060338, + "lng": -1.325418, + "state": null, + "state_code": null, + "city": "Winchester", + "geonames_city": { + "id": 2633858, + "city": "Winchester", + "geonames_admin1": { + "name": "England", + "id": 6269131, + "ascii_name": "England", + "code": "GB.ENG" + }, + "geonames_admin2": { + "name": "Hampshire", + "id": 2647554, + "ascii_name": "Hampshire", + "code": "GB.ENG.F2" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": "SOUTH EAST (ENGLAND)", + "code": "UKJ" + }, + "nuts_level2": { + "name": "Hampshire and Isle of Wight", + "code": "UKJ3" + }, + "nuts_level3": { + "name": "Central Hampshire", + "code": "UKJ36" + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 2635167 + } + ], + "links": [ + "http://www.winchester.ac.uk/pages/home.aspx" + ], + "aliases": [ + + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Winchester", + "labels": [ + + ], + "country": { + "country_name": "United Kingdom", + "country_code": "GB" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0000 9422 2878" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100010057" + ] + }, + "HESA": { + "preferred": null, + "all": [ + "0021" + ] + }, + "UCAS": { + "preferred": null, + "all": [ + "W76" + ] + }, + "UKPRN": { + "preferred": null, + "all": [ + "10003614" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "3140939" + ] + }, + 
"Wikidata": { + "preferred": null, + "all": [ + "Q3551690" + ] + }, + "GRID": { + "preferred": "grid.267454.6", + "all": "grid.267454.6" + } + } + }, + { + "id": "https://ror.org/01gw3d370", + "name": "University of Windsor", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1857, + "types": [ + "Education" + ], + "relationships": [ + + ], + "addresses": [ + { + "lat": 42.305196, + "lng": -83.067483, + "state": "Ontario", + "state_code": "CA-ON", + "city": "Windsor", + "geonames_city": { + "id": 6182962, + "city": "Windsor", + "geonames_admin1": { + "name": "Ontario", + "id": 6093943, + "ascii_name": "Ontario", + "code": "CA.08" + }, + "geonames_admin2": { + "name": null, + "id": null, + "ascii_name": null, + "code": null + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6251999 + } + ], + "links": [ + "http://www.uwindsor.ca/" + ], + "aliases": [ + "UWindsor", + "Assumption University of Windsor" + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Windsor", + "labels": [ + { + "label": "Université de windsor", + "iso639": "fr" + } + ], + "country": { + "country_name": "Canada", + "country_code": "CA" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0004 1936 9596" + ] + }, + "FundRef": { + "preferred": "100009154", + "all": [ + "100009154", + "501100000083" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "342733" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2065769" + ] + }, + "GRID": { + "preferred": "grid.267455.7", + "all": "grid.267455.7" + } + } + }, + { + "id": "https://ror.org/02gdzyx04", + "name": "University of Winnipeg", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1871, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "Winnipeg Institute for Theoretical Physics", + "type": "Child", + "id": "https://ror.org/010tw2j24" + } + ], + "addresses": [ + { + "lat": 49.890122, + "lng": -97.153367, + "state": "Manitoba", + "state_code": "CA-MB", + "city": "Winnipeg", + "geonames_city": { + "id": 6183235, + "city": "Winnipeg", + "geonames_admin1": { + "name": "Manitoba", + "id": 6065171, + "ascii_name": "Manitoba", + "code": "CA.03" + }, + "geonames_admin2": { + "name": null, + "id": null, + "ascii_name": null, + "code": null + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6251999 + } + ], + "links": [ + "http://www.uwinnipeg.ca/" + ], + "aliases": [ + + ], + "acronyms": [ + + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Winnipeg", + "labels": [ + { + "label": "Université de winnipeg", + "iso639": "fr" + } + ], + "country": { + "country_name": "Canada", + "country_code": "CA" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 1703 4731" + ] + }, + 
"FundRef": { + "preferred": null, + "all": [ + "100009367" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "587404" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q472167" + ] + }, + "GRID": { + "preferred": "grid.267457.5", + "all": "grid.267457.5" + } + } + }, + { + "id": "https://ror.org/03mnm0t94", + "name": "University of Wisconsin–Eau Claire", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1916, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "University of Wisconsin System", + "type": "Parent", + "id": "https://ror.org/03ydkyb10" + } + ], + "addresses": [ + { + "lat": 44.79895, + "lng": -91.499346, + "state": "Wisconsin", + "state_code": "US-WI", + "city": "Eau Claire", + "geonames_city": { + "id": 5251436, + "city": "Eau Claire", + "geonames_admin1": { + "name": "Wisconsin", + "id": 5279468, + "ascii_name": "Wisconsin", + "code": "US.WI" + }, + "geonames_admin2": { + "name": "Eau Claire County", + "id": 5251439, + "ascii_name": "Eau Claire County", + "code": "US.WI.035" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uwec.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UWEC" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Wisconsin%E2%80%93Eau_Claire", + "labels": [ + { + "label": "Université du Wisconsin à Eau Claire", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2227 2494" + ] + }, + "FundRef": { + "preferred": null, + "all": [ + "100010315" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "496729" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q3551771" + ] + }, + "GRID": { + "preferred": "grid.267460.1", + "all": "grid.267460.1" + } + } + }, + { + "id": "https://ror.org/05hbexn54", + "name": "University of Wisconsin–Green Bay", + "email_address": null, + "ip_addresses": [ + + ], + "established": 1965, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "University of Wisconsin System", + "type": "Parent", + "id": "https://ror.org/03ydkyb10" + } + ], + "addresses": [ + { + "lat": 44.533203, + "lng": -87.921521, + "state": "Wisconsin", + "state_code": "US-WI", + "city": "Green Bay", + "geonames_city": { + "id": 5254962, + "city": "Green Bay", + "geonames_admin1": { + "name": "Wisconsin", + "id": 5279468, + "ascii_name": "Wisconsin", + "code": "US.WI" + }, + "geonames_admin2": { + "name": "Brown County", + "id": 5246898, + "ascii_name": "Brown County", + "code": "US.WI.009" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uwgb.edu/" + ], + "aliases": [ + + ], + "acronyms": [ + "UWGB" + ], + "status": "active", + 
"wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Wisconsin%E2%80%93Green_Bay", + "labels": [ + { + "label": "Université du Wisconsin–Green Bay", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 0559 7692" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "1513886" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2378091" + ] + }, + "GRID": { + "preferred": "grid.267461.0", + "all": "grid.267461.0" + } + } + }, + { + "id": "https://ror.org/00x8ccz20", + "name": "University of Wisconsin–La Crosse", + "email_address": "", + "ip_addresses": [ + + ], + "established": 1909, + "types": [ + "Education" + ], + "relationships": [ + { + "label": "University of Wisconsin System", + "type": "Parent", + "id": "https://ror.org/03ydkyb10" + } + ], + "addresses": [ + { + "lat": 43.815576, + "lng": -91.233517, + "state": "Wisconsin", + "state_code": "US-WI", + "city": "La Crosse", + "geonames_city": { + "id": 5258957, + "city": "La Crosse", + "geonames_admin1": { + "name": "Wisconsin", + "id": 5279468, + "ascii_name": "Wisconsin", + "code": "US.WI" + }, + "geonames_admin2": { + "name": "La Crosse County", + "id": 5258961, + "ascii_name": "La Crosse County", + "code": "US.WI.063" + }, + "license": { + "attribution": "Data from geonames.org under a CC-BY 3.0 license", + "license": "http://creativecommons.org/licenses/by/3.0/" + }, + "nuts_level1": { + "name": null, + "code": null + }, + "nuts_level2": { + "name": null, + "code": null + }, + "nuts_level3": { + "name": null, + "code": null + } + }, + "postcode": null, + "primary": false, + "line": null, + "country_geonames_id": 6252001 + } + ], + "links": [ + "http://www.uwlax.edu/Home/Future-Students/" + ], + "aliases": [ + + ], + "acronyms": [ + "UW–L" + ], + "status": "active", + "wikipedia_url": "http://en.wikipedia.org/wiki/University_of_Wisconsin%E2%80%93La_Crosse", + "labels": [ + { + "label": "Université du Wisconsin–La Crosse", + "iso639": "fr" + } + ], + "country": { + "country_name": "United States", + "country_code": "US" + }, + "external_ids": { + "ISNI": { + "preferred": null, + "all": [ + "0000 0001 2169 5137" + ] + }, + "OrgRef": { + "preferred": null, + "all": [ + "2422287" + ] + }, + "Wikidata": { + "preferred": null, + "all": [ + "Q2688358" + ] + }, + "GRID": { + "preferred": "grid.267462.3", + "all": "grid.267462.3" + } + } + } + ], + "meta": { + "types": [ + { + "id": "company", + "title": "Company", + "count": 29790 + }, + { + "id": "education", + "title": "Education", + "count": 20325 + }, + { + "id": "nonprofit", + "title": "Nonprofit", + "count": 14187 + }, + { + "id": "healthcare", + "title": "Healthcare", + "count": 13107 + }, + { + "id": "facility", + "title": "Facility", + "count": 10080 + }, + { + "id": "other", + "title": "Other", + "count": 8369 + }, + { + "id": "government", + "title": "Government", + "count": 6511 + }, + { + "id": "archive", + "title": "Archive", + "count": 2967 + } + ], + "countries": [ + { + "id": "us", + "title": "United States", + "count": 31196 + }, + { + "id": "gb", + "title": "United Kingdom", + "count": 7410 + }, + { + "id": "de", + "title": "Germany", + "count": 5189 + }, + { + "id": "cn", + "title": "China", + "count": 4846 + }, + { + "id": "fr", + "title": "France", + "count": 4344 + }, + { + "id": "jp", + "title": "Japan", + "count": 3940 + }, + { + "id": "ca", + "title": "Canada", + "count": 3392 + }, + { + "id": "in", + "title": "India", 
+ "count": 3075 + }, + { + "id": "cz", + "title": "Czech Republic", + "count": 2780 + }, + { + "id": "ru", + "title": "Russia", + "count": 2109 + } + ], + "statuses": [ + { + "id": "active", + "title": "active", + "count": 105336 + } + ] + } +} \ No newline at end of file From 3b70c3ce00a4f748633433ac78666bc53f4250a9 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 6 Nov 2023 15:11:39 +0100 Subject: [PATCH 503/686] [DSC-1229] Fixes BulkAccessControlIT --- .../dspace/app/launcher/ScriptLauncher.java | 10 ++- .../cli/DSpaceSkipUnknownArgumentsParser.java | 77 +++++++++++++++++++ .../org/dspace/scripts/DSpaceRunnable.java | 56 +++++++++++++- .../configuration/ScriptConfiguration.java | 14 ++++ .../BulkAccessControlIT.java | 36 ++++----- .../config/spring/api/scripts.xml | 5 ++ .../config/spring/rest/scripts.xml | 5 -- 7 files changed, 175 insertions(+), 28 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java diff --git a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java index e7d4745c952f..723ba91fb946 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java @@ -22,6 +22,7 @@ import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.DSpaceRunnable.StepResult; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -165,8 +166,13 @@ public static int handleScript(String[] args, Document commandConfigs, private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, DSpaceRunnable script, EPerson currentUser) { try { - script.initialize(args, dSpaceRunnableHandler, currentUser); - script.run(); + StepResult result = script.initialize(args, dSpaceRunnableHandler, currentUser); + // check the StepResult, only run the script if the result is Continue; + // otherwise - for example the script is started with the help as argument, nothing is to do + if (StepResult.Continue.equals(result)) { + // runs the script, the normal initialization is successful + script.run(); + } return 0; } catch (ParseException e) { script.printHelp(); diff --git a/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java b/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java new file mode 100644 index 000000000000..afd74a588d17 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java @@ -0,0 +1,77 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.cli; + +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; + +/** + * Extended version of the DefaultParser. This parser skip/ignore unknown arguments. 
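+ * <p>
+ * Illustrative behaviour (hypothetical arguments): if the supplied Options define only
+ * {@code -h/--help}, parsing {@code ["-u", "someUuid", "-h"]} with this parser yields a
+ * CommandLine in which only {@code -h} is set. The unknown {@code -u} token and its value are
+ * silently filtered out instead of triggering an {@code UnrecognizedOptionException}. Note that
+ * the filter keeps only tokens that name an option known to the supplied Options; everything
+ * else, including option values, is discarded, which suits the zero-argument help flag this
+ * parser is used with.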
+ */ +public class DSpaceSkipUnknownArgumentsParser extends DefaultParser { + + + @Override + public CommandLine parse(Options options, String[] arguments) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments)); + } + + @Override + public CommandLine parse(Options options, String[] arguments, Properties properties) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), properties); + } + + /** + * Parse the arguments according to the specified options and properties. + * @param options the specified Options + * @param arguments the command line arguments + * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't + * stop the parsing and doesn't trigger a ParseException + * + * @return the list of atomic option and value tokens + * @throws ParseException if there are any problems encountered while parsing the command line tokens. + */ + @Override + public CommandLine parse(Options options, String[] arguments, boolean stopAtNonOption) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), stopAtNonOption); + } + + /** + * Parse the arguments according to the specified options and properties. + * @param options the specified Options + * @param arguments the command line arguments + * @param properties command line option name-value pairs + * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't + * stop the parsing and doesn't trigger a ParseException + * + * @return the list of atomic option and value tokens + * @throws ParseException if there are any problems encountered while parsing the command line tokens. + */ + @Override + public CommandLine parse(Options options, String[] arguments, Properties properties, boolean stopAtNonOption) + throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), properties, stopAtNonOption); + } + + + private String[] getOnlyKnownArguments(Options options, String[] arguments) { + List knownArguments = new ArrayList<>(); + for (String arg : arguments) { + if (options.hasOption(arg)) { + knownArguments.add(arg); + } + } + return knownArguments.toArray(new String[0]); + } +} diff --git a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java index 2319aee31752..2ea0a52d6e34 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java +++ b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java @@ -18,6 +18,7 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.StringUtils; +import org.dspace.cli.DSpaceSkipUnknownArgumentsParser; import org.dspace.eperson.EPerson; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -36,6 +37,11 @@ public abstract class DSpaceRunnable implements R */ protected CommandLine commandLine; + /** + * The minimal CommandLine object for the script that'll hold help information + */ + protected CommandLine helpCommandLine; + /** * This EPerson identifier variable is the UUID of the EPerson that's running the script */ @@ -64,26 +70,66 @@ private void setHandler(DSpaceRunnableHandler dSpaceRunnableHandler) { * @param args The arguments given to the script * @param dSpaceRunnableHandler The DSpaceRunnableHandler object that defines from 
where the script was ran * @param currentUser + * @return the result of this step; StepResult.Continue: continue the normal process, + * initialize is successful; otherwise exit the process (the help or version is shown) * @throws ParseException If something goes wrong */ - public void initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, + public StepResult initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, EPerson currentUser) throws ParseException { if (currentUser != null) { this.setEpersonIdentifier(currentUser.getID()); } this.setHandler(dSpaceRunnableHandler); - this.parse(args); + + // parse the command line in a first step for the help options + // --> no other option is required + StepResult result = this.parseForHelp(args); + switch (result) { + case Exit: + // arguments of the command line matches the help options, handle this + handleHelpCommandLine(); + break; + + case Continue: + // arguments of the command line matches NOT the help options, parse the args for the normal options + result = this.parse(args); + break; + default: + break; + } + + return result; + } + + + /** + * This method handle the help command line. In this easy implementation only the help is printed. For more + * complexity override this method. + */ + private void handleHelpCommandLine() { + printHelp(); } + /** * This method will take the primitive array of String objects that represent the parameters given to the String * and it'll parse these into a CommandLine object that can be used by the script to retrieve the data * @param args The primitive array of Strings representing the parameters * @throws ParseException If something goes wrong */ - private void parse(String[] args) throws ParseException { + private StepResult parse(String[] args) throws ParseException { commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args); setup(); + return StepResult.Continue; + } + + private StepResult parseForHelp(String[] args) throws ParseException { + helpCommandLine = new DSpaceSkipUnknownArgumentsParser().parse(getScriptConfiguration().getHelpOptions(), args); + if (helpCommandLine.getOptions() != null && helpCommandLine.getOptions().length > 0) { + return StepResult.Exit; + } + + return StepResult.Continue; } /** @@ -158,4 +204,8 @@ public UUID getEpersonIdentifier() { public void setEpersonIdentifier(UUID epersonIdentifier) { this.epersonIdentifier = epersonIdentifier; } + + public enum StepResult { + Continue, Exit; + } } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 2da8658c8392..5da7888ae11f 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -10,6 +10,7 @@ import java.sql.SQLException; import java.util.List; +import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; @@ -109,6 +110,19 @@ public boolean isAllowedToExecute(Context context) { */ public abstract Options getOptions(); + /** + * The getter for the options of the Script (help informations) + * + * @return the options value of this ScriptConfiguration for help + */ + public Options getHelpOptions() { + Options options = new Options(); + + 
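+        // Only the help option is registered here. DSpaceRunnable#parseForHelp runs this option
+        // set through DSpaceSkipUnknownArgumentsParser, so -h/--help can be detected up front
+        // even when required script options are missing or unknown arguments are present.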
options.addOption(Option.builder("h").longOpt("help").desc("help").hasArg(false).required(false).build()); + + return options; + } + @Override public void setBeanName(String beanName) { this.name = beanName; diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java index 73f02e40494c..60a0d629debc 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -148,7 +148,7 @@ public void performBulkAccessWithAnonymousEPersonTest() throws Exception { TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -191,7 +191,7 @@ public void performBulkAccessWithNotExistingEPersonTest() throws Exception { TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -232,7 +232,7 @@ public void performBulkAccessWithNotAdminEPersonTest() throws Exception { TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -396,7 +396,7 @@ public void performBulkAccessWithNotCollectionAdminEPersonTest() throws Exceptio TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -440,7 +440,7 @@ public void performBulkAccessWithNotCommunityAdminEPersonTest() throws Exception TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -488,7 +488,7 @@ public void 
performBulkAccessWithNotItemAdminEPersonTest() throws Exception { TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -506,7 +506,7 @@ public void performBulkAccessWithoutRequiredParamTest() throws Exception { TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -538,7 +538,7 @@ public void performBulkAccessWithEmptyJsonTest() throws Exception { TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -579,7 +579,7 @@ public void performBulkAccessWithWrongModeOfItemValueTest() throws Exception { TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -619,7 +619,7 @@ public void performBulkAccessWithMissingModeOfItemValueTest() throws Exception { TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -660,7 +660,7 @@ public void performBulkAccessWithWrongModeOfBitstreamValueTest() throws Exceptio TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -700,7 +700,7 @@ public void performBulkAccessWithMissingModeOfBitstreamValueTest() throws Except TestDSpaceRunnableHandler 
testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -740,7 +740,7 @@ public void performBulkAccessWithNotFoundAccessConditionNameTest() throws Except TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -782,7 +782,7 @@ public void performBulkAccessWithInvalidEmbargoAccessConditionDateTest() throws TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(3)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -824,7 +824,7 @@ public void performBulkAccessWithInvalidLeaseAccessConditionDateTest() throws Ex TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(3)); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( @@ -868,7 +868,7 @@ public void performBulkAccessForCommunityItemsWithBitstreamConstraintsTest() thr ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( containsString("constraint is not supported when uuid isn't an Item") )); @@ -915,7 +915,7 @@ public void performBulkAccessForMultipleItemsWithBitstreamConstraintsTest() thro ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( containsString("constraint isn't supported when multiple uuids are provided") )); @@ -1031,7 +1031,7 @@ public void performBulkAccessWithAddModeAndEmptyAccessConditionsTest() throws Ex ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); 
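+        // This failure now produces two error messages on the handler, which the revised hasSize
+        // assertion below reflects; the message-content check that follows is unchanged.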
assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); - assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(2)); assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( containsString("accessConditions of item must be provided with mode") )); diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/scripts.xml index 6facf51941df..cee2bf0f0629 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/scripts.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/scripts.xml @@ -32,4 +32,9 @@ + + + + + diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml index 1f668b9e1616..b40cfa1704a5 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml @@ -63,9 +63,4 @@ - - - - - From 5812c0cfb4145afc43d173a1a08509dab9c104be Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 6 Nov 2023 16:43:36 +0100 Subject: [PATCH 504/686] [DSC-1229] Fixes SQL headers --- .../h2/V7.0_2021.03.26__process_to_group.sql | 8 ++++++++ .../oracle/V7.3_2022.06.16__process_to_group.sql | 14 ++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql index 833765ec3c4a..01d5e709c1cb 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.03.26__process_to_group.sql @@ -5,6 +5,14 @@ -- -- http://www.dspace.org/license/ -- + +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
+-- http://flywaydb.org/ +-- =============================================================== ------------------------------------------------------------------------------- -- Sequences for Process within Group feature ------------------------------------------------------------------------------- diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql index 8b137891791f..5ebd41a866d0 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql @@ -1 +1,15 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. +-- http://flywaydb.org/ +-- =============================================================== From fe3d670a59236053f7bc1896935b026003aae19f Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 6 Nov 2023 16:44:15 +0100 Subject: [PATCH 505/686] [DSC-1229] Fixes DocumentCrosswalkIT and ReferCrosswalkIT --- .../config/spring/api/test-discovery.xml | 844 ++++++++++++++++++ 1 file changed, 844 insertions(+) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml index ff446e7558f5..60642118bb39 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -113,6 +113,29 @@ + + + + + + + + + + + + + + + + + + + + + + + @@ -2119,6 +2142,563 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + projectinvestigators_authority:{0} AND search.resourcetype:Item + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + author_authority:{0} AND search.resourcetype:Item + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + '{'!join from=search.resourceid to=projectinvestigators_authority fromIndex=${solr.multicorePrefix}search'}'person.affiliation.name_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + dc.description.sponsorship_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + '{'!join from=search.resourceid to=author_authority fromIndex=${solr.multicorePrefix}search'}'person.affiliation.name_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + person.affiliation.name_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + involvedorganisation_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + 
+ + + + organization.parentOrganization_authority:{0} + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + dc.relation.project_authority:{0} AND entityType_keyword:Project + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + dc.relation.project_authority:{0} AND entityType_keyword:Funding + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + dc.relation.project_authority:{0} AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false + + + + + + + + + + + location.coll:{0} + + + + + + + + + + + location.comm:{0} + + + + + + + + + + + search.resourcetype:Community + -withdrawn:true AND -discoverable:false + + + + + + + + + + + search.resourcetype:Collection + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -2847,6 +3427,122 @@ + + + + + + crispj.investigator + crispj.coinvestigators + + + + + + + + + + + + + + + dc.contributor.editor + dc.creator + + + + + + + + + + + + + + + oairecerif.author.affiliation + oairecerif.editor.affiliation + + + + + + + + + + + + + + + dc.relation.funding + + + + + + + + + + + + + + + person.identifier.orcid + + + + + + + + + + + cris.legacyId + + + + + + + + + + + dc.identifier.doi + + + + + + + + + + + dc.type + + + + + + + + + dc.language.iso + person.knowsLanguage + + + @@ -2938,5 +3634,153 @@ + + + + + + + + + + relation.isProjectsSelectedFor + {0} + + + + + + + + + + + + relation.isResearchoutputsSelectedFor + {0} + + + + + + + + + + + + relation.isRpprojectsSelectedFor + {0} + + + + + + + + + + + + relation.isPublicationsSelectedFor + {0} + + + + + + + + + + + + relation.isRppublicationsSelectedFor + {0} + + + + + + + + + + + + relation.isPeopleSelectedFor + {0} + + + + + + + + + + + + relation.isOrganizationsSelectedFor + {0} + + + + + + + + + + + + relation.isGrantsSelectedFor + {0} + + + + + + + + + + + + + + + + + dc.type + + + + + + + + + + + + + + + + + + + + + dc.date.issued + + + + + + + From 5bf7c21cf26fb73796ccd9a6d6ae42dfda211bce Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 6 Nov 2023 16:50:17 +0100 Subject: [PATCH 506/686] [DSC-1229] Fixes S3BitStoreServiceIT --- .../java/org/dspace/storage/bitstore/S3BitStoreService.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index f79b84b30c1e..21840f453112 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -44,7 +44,6 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.IOUtils; import org.apache.commons.io.output.NullOutputStream; import org.apache.commons.lang3.StringUtils; @@ -302,10 +301,6 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { ) { Utils.bufferedCopy(dis, fos); in.close(); - byte[] md5Digest = dis.getMessageDigest().digest(); - String md5Base64 = 
Base64.encodeBase64String(md5Digest); - ObjectMetadata objMetadata = new ObjectMetadata(); - objMetadata.setContentMD5(md5Base64); Upload upload = tm.upload(bucketName, key, scratchFile); From 7b92709ee83aed48e212bf21f630ab9dba740ae1 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 6 Nov 2023 17:01:07 +0100 Subject: [PATCH 507/686] [DSC-1229] Fixes checkstyle issues --- .../src/main/java/org/dspace/app/util/DCInputSet.java | 5 ++--- .../dspace/statistics/SolrLoggerUsageEventListener.java | 3 ++- .../src/test/java/org/dspace/discovery/DiscoveryIT.java | 7 ------- .../org/dspace/app/rest/BrowsesResourceControllerIT.java | 5 ++--- .../java/org/dspace/app/rest/ScriptRestRepositoryIT.java | 3 --- .../org/dspace/app/rest/SubmissionFormsControllerIT.java | 3 --- 6 files changed, 6 insertions(+), 20 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java index afd661ded1fb..6feb1e247551 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java @@ -11,7 +11,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.Objects; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -104,9 +103,9 @@ public boolean isDefinedMultTitles() { * @return true if the current set has all the prev. published fields */ public boolean isDefinedPubBefore() { - return (isFieldPresent("dc.date.issued") && + return isFieldPresent("dc.date.issued") && isFieldPresent("dc.identifier.citation") && - isFieldPresent("dc.publisher")); + isFieldPresent("dc.publisher"); } /** diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java index 9825f3bfae36..cef8702bf41a 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java @@ -51,7 +51,8 @@ public void receiveEvent(Event event) { if (UsageEvent.Action.VIEW == ue.getAction()) { if (ue.getRequest() != null) { - solrLoggerService.postView(ue.getObject(), ue.getRequest(), currentUser, ue.getReferrer(), new Date()); + solrLoggerService + .postView(ue.getObject(), ue.getRequest(), currentUser, ue.getReferrer(), new Date()); } else { solrLoggerService.postView(ue.getObject(), ue.getIp(), ue.getUserAgent(), ue.getXforwardedfor(), currentUser, ue.getReferrer()); diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 533258aaffed..0d1cc13106a8 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -7,18 +7,14 @@ */ package org.dspace.discovery; -import static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; -import java.util.LinkedList; import java.util.List; -import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import org.dspace.AbstractIntegrationTestWithDatabase; @@ -28,7 +24,6 @@ import 
org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; -import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.PoolTaskBuilder; import org.dspace.builder.WorkflowItemBuilder; @@ -44,8 +39,6 @@ import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; -import org.dspace.discovery.configuration.DiscoveryConfiguration; -import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableItem; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index 35dc090df479..a5c90b11e80a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -24,7 +24,6 @@ import org.dspace.app.rest.matcher.BrowseEntryResourceMatcher; import org.dspace.app.rest.matcher.BrowseIndexMatcher; import org.dspace.app.rest.matcher.ItemMatcher; -import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; @@ -1516,7 +1515,7 @@ public void testBrowseByEntriesStartsWith() throws Exception { //Verify that the startsWith paramater is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=C"))); - }; + } @Test public void testBrowseByEntriesStartsWithAndDiacritics() throws Exception { @@ -1676,7 +1675,7 @@ public void testBrowseByEntriesStartsWithAndDiacritics() throws Exception { //Verify that the startsWith paramater is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=Guión"))); - }; + } @Test public void testBrowseByItemsStartsWith() throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 19133e17421a..74f9ab763306 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -48,7 +48,6 @@ import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter; import org.dspace.app.rest.matcher.BitstreamMatcher; -import org.dspace.app.rest.matcher.PageMatcher; import org.dspace.app.rest.matcher.ProcessMatcher; import org.dspace.app.rest.matcher.ScriptMatcher; import org.dspace.app.rest.model.ParameterValueRest; @@ -67,8 +66,6 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.ProcessStatus; -import org.dspace.content.service.BitstreamService; -import org.dspace.eperson.EPerson; import org.dspace.content.authority.DCInputAuthority; import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.content.authority.service.MetadataAuthorityService; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java index b144cab40c5f..f6662bb23e20 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java @@ -7,8 +7,6 @@ */ package org.dspace.app.rest; -import static org.dspace.app.rest.matcher.SubmissionFormFieldMatcher.matchFormWithVisibility; -import static org.dspace.app.rest.matcher.SubmissionFormFieldMatcher.matchFormWithoutVisibility; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -20,7 +18,6 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.util.Locale; -import java.util.Map; import org.dspace.app.rest.matcher.SubmissionFormFieldMatcher; import org.dspace.app.rest.repository.SubmissionFormRestRepository; From d56a61fbedfcbba98a55d34ef27e0eeba44ca560 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 6 Nov 2023 18:00:49 +0100 Subject: [PATCH 508/686] [DSC-1229] Fixes FileTypeMetadataEnhancerConsumerIT --- .../FileTypeMetadataEnhancerConsumerIT.java | 32 ++++++++++++++++--- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java b/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java index bfa29ab330d4..200ae7b3fefa 100644 --- a/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java @@ -16,9 +16,11 @@ import java.io.IOException; import java.sql.SQLException; import java.text.ParseException; +import java.util.List; import java.util.function.Predicate; import org.apache.commons.codec.binary.StringUtils; +import org.apache.curator.shaded.com.google.common.base.Objects; import org.apache.tools.ant.filters.StringInputStream; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.authorize.AuthorizeException; @@ -36,9 +38,11 @@ import org.dspace.content.service.BitstreamService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; import org.junit.Before; import org.junit.Test; -import org.mockito.Mockito; public class FileTypeMetadataEnhancerConsumerIT extends AbstractIntegrationTestWithDatabase { @@ -231,7 +235,7 @@ public void testWithTypeDeleted() .filter(metadataFilter(FileTypeMetadataEnhancerConsumer.entityTypeMetadata)) .findFirst() .orElseThrow(); - bitstream.getMetadata().remove(entityType); + bitstreamService.removeMetadataValues(context, bitstream, List.of(entityType)); context.turnOffAuthorisationSystem(); this.bitstreamService.update(context, bitstream); @@ -242,8 +246,8 @@ public void testWithTypeDeleted() bitstream = context.reloadEntity(bitstream); item = context.reloadEntity(item); - assertThat(bitstream.getMetadata(), not(hasItem(with("dc.type", Mockito.any())))); - assertThat(item.getMetadata(), not(hasItem(with("dspace.file.type", Mockito.any())))); + assertThat(bitstream.getMetadata(), not(hasItem(withField("dc.type")))); + assertThat(item.getMetadata(),not(hasItem(withField("dspace.file.type")))); } @Test @@ -429,4 +433,24 @@ private Predicate 
metadataFilter(MetadataFieldName metada StringUtils.equals(metadataField.element, metadata.getElement()) && StringUtils.equals(metadataField.qualifier, metadata.getQualifier()); } + + public static Matcher withField(final String field) { + return new TypeSafeMatcher() { + + @Override + protected void describeMismatchSafely(MetadataValue metadataValue, Description description) { + description.appendText("was ").appendValue(metadataValue.getMetadataField().toString('.')); + } + + @Override + public void describeTo(Description description) { + description.appendText("MetadataValue with id ").appendValue(field); + } + + @Override + protected boolean matchesSafely(MetadataValue metadataValue) { + return Objects.equal(field, metadataValue.getMetadataField().toString('.')); + } + }; + } } From ca624e7ab5cca3d1fb0331dbce15bab0f2267cc6 Mon Sep 17 00:00:00 2001 From: nwoodward Date: Thu, 2 Nov 2023 13:36:46 -0500 Subject: [PATCH 509/686] removed options to ping search engines when generating sitemaps (cherry picked from commit f8f88060408c30314cdcf38ba5bbac0f367ee3fd) --- .../dspace/app/sitemap/GenerateSitemaps.java | 109 +----------------- dspace/config/dspace.cfg | 13 --- 2 files changed, 3 insertions(+), 119 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java index 400b5ecb87cb..5e9a61556083 100644 --- a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java +++ b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java @@ -7,15 +7,8 @@ */ package org.dspace.app.sitemap; -import java.io.BufferedReader; import java.io.File; import java.io.IOException; -import java.io.InputStreamReader; -import java.io.UnsupportedEncodingException; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLEncoder; import java.sql.SQLException; import java.util.Date; import java.util.Iterator; @@ -29,7 +22,6 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; @@ -87,11 +79,6 @@ public static void main(String[] args) throws Exception { "do not generate sitemaps.org protocol sitemap"); options.addOption("b", "no_htmlmap", false, "do not generate a basic HTML sitemap"); - options.addOption("a", "ping_all", false, - "ping configured search engines"); - options - .addOption("p", "ping", true, - "ping specified search engine URL"); options .addOption("d", "delete", false, "delete sitemaps dir and its contents"); @@ -116,14 +103,13 @@ public static void main(String[] args) throws Exception { } /* - * Sanity check -- if no sitemap generation or pinging to do, or deletion, print usage + * Sanity check -- if no sitemap generation or deletion, print usage */ if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b') && line.hasOption('s') && !line.hasOption('g') - && !line.hasOption('m') && !line.hasOption('y') - && !line.hasOption('p')) { + && !line.hasOption('m') && !line.hasOption('y')) { System.err - .println("Nothing to do (no sitemap to generate, no search engines to ping)"); + .println("Nothing to do (no sitemap to generate)"); hf.printHelp(usage, options); System.exit(1); } @@ -137,20 +123,6 @@ public static void main(String[] args) throws 
Exception { deleteSitemaps(); } - if (line.hasOption('a')) { - pingConfiguredSearchEngines(); - } - - if (line.hasOption('p')) { - try { - pingSearchEngine(line.getOptionValue('p')); - } catch (MalformedURLException me) { - System.err - .println("Bad search engine URL (include all except sitemap URL)"); - System.exit(1); - } - } - System.exit(0); } @@ -303,79 +275,4 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) c.abort(); } - - /** - * Ping all search engines configured in {@code dspace.cfg}. - * - * @throws UnsupportedEncodingException theoretically should never happen - */ - public static void pingConfiguredSearchEngines() - throws UnsupportedEncodingException { - String[] engineURLs = configurationService - .getArrayProperty("sitemap.engineurls"); - - if (ArrayUtils.isEmpty(engineURLs)) { - log.warn("No search engine URLs configured to ping"); - return; - } - - for (int i = 0; i < engineURLs.length; i++) { - try { - pingSearchEngine(engineURLs[i]); - } catch (MalformedURLException me) { - log.warn("Bad search engine URL in configuration: " - + engineURLs[i]); - } - } - } - - /** - * Ping the given search engine. - * - * @param engineURL Search engine URL minus protocol etc, e.g. - * {@code www.google.com} - * @throws MalformedURLException if the passed in URL is malformed - * @throws UnsupportedEncodingException theoretically should never happen - */ - public static void pingSearchEngine(String engineURL) - throws MalformedURLException, UnsupportedEncodingException { - // Set up HTTP proxy - if ((StringUtils.isNotBlank(configurationService.getProperty("http.proxy.host"))) - && (StringUtils.isNotBlank(configurationService.getProperty("http.proxy.port")))) { - System.setProperty("proxySet", "true"); - System.setProperty("proxyHost", configurationService - .getProperty("http.proxy.host")); - System.getProperty("proxyPort", configurationService - .getProperty("http.proxy.port")); - } - - String sitemapURL = configurationService.getProperty("dspace.ui.url") - + "/sitemap"; - - URL url = new URL(engineURL + URLEncoder.encode(sitemapURL, "UTF-8")); - - try { - HttpURLConnection connection = (HttpURLConnection) url - .openConnection(); - - BufferedReader in = new BufferedReader(new InputStreamReader( - connection.getInputStream())); - - String inputLine; - StringBuffer resp = new StringBuffer(); - while ((inputLine = in.readLine()) != null) { - resp.append(inputLine).append("\n"); - } - in.close(); - - if (connection.getResponseCode() == 200) { - log.info("Pinged " + url.toString() + " successfully"); - } else { - log.warn("Error response pinging " + url.toString() + ":\n" - + resp); - } - } catch (IOException e) { - log.warn("Error pinging " + url.toString(), e); - } - } } diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 61027c555079..381d079ca621 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1403,19 +1403,6 @@ sitemap.dir = ${dspace.dir}/sitemaps # Defaults to "sitemaps", which means they are available at ${dspace.server.url}/sitemaps/ # sitemap.path = sitemaps -# -# Comma-separated list of search engine URLs to 'ping' when a new Sitemap has -# been created. Include everything except the Sitemap URL itself (which will -# be URL-encoded and appended to form the actual URL 'pinged'). 
-# -sitemap.engineurls = http://www.google.com/webmasters/sitemaps/ping?sitemap= - -# Add this to the above parameter if you have an application ID with Yahoo -# (Replace REPLACE_ME with your application ID) -# http://search.yahooapis.com/SiteExplorerService/V1/updateNotification?appid=REPLACE_ME&url= -# -# No known Sitemap 'ping' URL for MSN/Live search - # Define cron for how frequently the sitemap should refresh. # Defaults to running daily at 1:15am # Cron syntax is defined at https://www.quartz-scheduler.org/api/2.3.0/org/quartz/CronTrigger.html From bf5b19b6da561a9fc2e088dc1b376987c8115760 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 7 Nov 2023 11:03:39 +0100 Subject: [PATCH 510/686] DSC-1342 Sitemap includes links to restricted content --- .../dspace/app/sitemap/GenerateSitemaps.java | 241 +++++++++--------- .../app/rest/SitemapRestControllerIT.java | 80 +++++- 2 files changed, 201 insertions(+), 120 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java index 6188272aca47..f3e651d7d26e 100644 --- a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java +++ b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java @@ -17,10 +17,7 @@ import java.net.URL; import java.net.URLEncoder; import java.sql.SQLException; -import java.util.Date; import java.util.Iterator; -import java.util.List; -import java.util.Optional; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; @@ -28,29 +25,26 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.app.customurl.CustomUrlService; -import org.dspace.content.Collection; -import org.dspace.content.Community; -import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; -import org.dspace.content.service.ItemService; +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; import org.dspace.core.Context; import org.dspace.core.LogHelper; -import org.dspace.discovery.DiscoverQuery; -import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.SearchService; -import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.SolrSearchCore; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.dspace.utils.DSpace; /** * Command-line utility for generating HTML and Sitemaps.org protocol Sitemaps. 
@@ -64,15 +58,11 @@ public class GenerateSitemaps { */ private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(GenerateSitemaps.class); - private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - private static final CollectionService collectionService = - ContentServiceFactory.getInstance().getCollectionService(); - private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService(); private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); private static final SearchService searchService = SearchUtils.getSearchService(); - - private static final CustomUrlService customUrlService = new DSpace().getSingletonService(CustomUrlService.class); + private static final GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + private static final int PAGE_SIZE = 1000; /** * Default constructor @@ -213,112 +203,127 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) } Context c = new Context(Context.Mode.READ_ONLY); - - List comms = communityService.findAll(c); - - for (Community comm : comms) { - String url = uiURLStem + "/communities/" + comm.getID(); - - if (makeHTMLMap) { - html.addURL(url, null); - } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, null); - } - - c.uncacheEntity(comm); - } - - List colls = collectionService.findAll(c); - - for (Collection coll : colls) { - String url = uiURLStem + "/collections/" + coll.getID(); - - if (makeHTMLMap) { - html.addURL(url, null); - } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, null); - } - - c.uncacheEntity(coll); + SolrSearchCore solrSearchCore = searchService.getSolrSearchCore(); + SolrClient solr = solrSearchCore.getSolr(); + Group anonymousGroup = groupService.findByName(c, Group.ANONYMOUS); + String anonGroupId = ""; + if (anonymousGroup != null) { + anonGroupId = anonymousGroup.getID().toString(); } - Iterator allItems = itemService.findAll(c); - int itemCount = 0; - - while (allItems.hasNext()) { - Item i = allItems.next(); - - Optional customUrl = customUrlService.getCustomUrl(i); - if (customUrl.isPresent()) { - - String url = uiURLStem + "/entities/" + StringUtils.lowerCase(itemService.getEntityTypeLabel(i)) - + "/" + customUrl.get(); - - if (makeHTMLMap) { - html.addURL(url, null); + try { + SolrQuery solrQuery = new SolrQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":Community"); + solrQuery.addFilterQuery("read:g" + anonGroupId); + solrQuery.setFields(SearchUtils.RESOURCE_ID_FIELD); + solrQuery.setRows(PAGE_SIZE); + int offset = 0; + long commsCount = 0; + QueryResponse rsp; + do { + solrQuery.setStart(offset); + rsp = solr.query(solrQuery, solrSearchCore.REQUEST_METHOD); + SolrDocumentList docs = rsp.getResults(); + commsCount = docs.getNumFound(); + Iterator iter = docs.iterator(); + + while (iter.hasNext()) { + SolrDocument doc = (SolrDocument) iter.next(); + String url = uiURLStem + "/communities/" + doc.getFieldValue(SearchUtils.RESOURCE_ID_FIELD); + + if (makeHTMLMap) { + html.addURL(url, null); + } + if (makeSitemapOrg) { + sitemapsOrg.addURL(url, null); + } } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, null); + offset += PAGE_SIZE; + } while (offset < commsCount); + + solrQuery = new SolrQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":Collection"); + solrQuery.addFilterQuery("read:g" + anonGroupId); + solrQuery.setFields(SearchUtils.RESOURCE_ID_FIELD); + solrQuery.setRows(PAGE_SIZE); 
+ offset = 0; + long collsCount = 0; + do { + solrQuery.setStart(offset); + rsp = solr.query(solrQuery, solrSearchCore.REQUEST_METHOD); + SolrDocumentList docs = rsp.getResults(); + collsCount = docs.getNumFound(); + Iterator iter = docs.iterator(); + + while (iter.hasNext()) { + SolrDocument doc = (SolrDocument) iter.next(); + String url = uiURLStem + "/collections/" + doc.getFieldValue(SearchUtils.RESOURCE_ID_FIELD); + + if (makeHTMLMap) { + html.addURL(url, null); + } + if (makeSitemapOrg) { + sitemapsOrg.addURL(url, null); + } } + offset += PAGE_SIZE; + } while (offset < collsCount); + + solrQuery = new SolrQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":Item"); + solrQuery.setFields(SearchUtils.RESOURCE_ID_FIELD, "customurl", "search.entitytype"); + solrQuery.addFilterQuery("read:g" + anonGroupId); + solrQuery.addFilterQuery("-discoverable:false"); + solrQuery.setRows(PAGE_SIZE); + offset = 0; + long itemsCount = 0; + do { + solrQuery.setStart(offset); + rsp = solr.query(solrQuery, solrSearchCore.REQUEST_METHOD); + SolrDocumentList docs = rsp.getResults(); + itemsCount = docs.getNumFound(); + Iterator iter = docs.iterator(); + + while (iter.hasNext()) { + SolrDocument doc = (SolrDocument) iter.next(); + String uuid = (String) doc.getFirstValue(SearchUtils.RESOURCE_ID_FIELD); + String entityType = (String) doc.getFirstValue("search.entitytype"); + String customUrl = (String) doc.getFirstValue("customUrl"); + String url = uiURLStem + "/items/" + uuid; + + if (StringUtils.isNotBlank(customUrl)) { + url = uiURLStem + "/entities/" + StringUtils.lowerCase(entityType) + "/" + customUrl; + } else if (StringUtils.isNoneBlank(entityType)) { + url = uiURLStem + "/entities/" + StringUtils.lowerCase(entityType) + "/" + uuid; + } + if (makeHTMLMap) { + html.addURL(url, null); + } + if (makeSitemapOrg) { + sitemapsOrg.addURL(url, null); + } - } - - DiscoverQuery entityQuery = new DiscoverQuery(); - entityQuery.setQuery("search.uniqueid:\"Item-" + i.getID() + "\" and entityType:*"); - entityQuery.addSearchField("entityType"); - - try { - DiscoverResult discoverResult = searchService.search(c, entityQuery); - - String url; - if (CollectionUtils.isNotEmpty(discoverResult.getIndexableObjects()) - && CollectionUtils.isNotEmpty(discoverResult.getSearchDocument( - discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType")) - && StringUtils.isNotBlank(discoverResult.getSearchDocument( - discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0)) - ) { - url = uiURLStem + "/entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument( - discoverResult.getIndexableObjects().get(0)) - .get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID(); - } else { - url = uiURLStem + "/items/" + i.getID(); } - Date lastMod = i.getLastModified(); + offset += PAGE_SIZE; + } while (offset < itemsCount); - if (makeHTMLMap) { - html.addURL(url, lastMod); - } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, lastMod); - } - } catch (SearchServiceException e) { - log.error("Failed getting entitytype through solr for item " + i.getID() + ": " + e.getMessage()); + if (makeHTMLMap) { + int files = html.finish(); + log.info(LogHelper.getHeader(c, "write_sitemap", + "type=html,num_files=" + files + ",communities=" + + commsCount + ",collections=" + collsCount + + ",items=" + itemsCount)); } - c.uncacheEntity(i); - - itemCount++; - } - - if (makeHTMLMap) { - int files = html.finish(); - log.info(LogHelper.getHeader(c, "write_sitemap", - "type=html,num_files=" 
+ files + ",communities=" - + comms.size() + ",collections=" + colls.size() - + ",items=" + itemCount)); - } - - if (makeSitemapOrg) { - int files = sitemapsOrg.finish(); - log.info(LogHelper.getHeader(c, "write_sitemap", - "type=html,num_files=" + files + ",communities=" - + comms.size() + ",collections=" + colls.size() - + ",items=" + itemCount)); + if (makeSitemapOrg) { + int files = sitemapsOrg.finish(); + log.info(LogHelper.getHeader(c, "write_sitemap", + "type=html,num_files=" + files + ",communities=" + + commsCount + ",collections=" + collsCount + + ",items=" + itemsCount)); + } + } catch (SolrServerException e) { + throw new RuntimeException(e); + } finally { + c.abort(); } - - c.abort(); } /** diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java index cbcf970547f7..3dc0bbb05098 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.dspace.builder.ItemBuilder.createItem; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; @@ -16,6 +17,7 @@ import javax.servlet.ServletException; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.content.Collection; @@ -38,10 +40,22 @@ public class SitemapRestControllerIT extends AbstractControllerIntegrationTest { @Autowired ConfigurationService configurationService; + @Autowired + ResourcePolicyService policyService; + private final static String SITEMAPS_ENDPOINT = "sitemaps"; private Item item1; private Item item2; + private Item itemRestricted; + private Item itemUndiscoverable; + private Item entityPublication; + private Item entityPublicationRestricted; + private Item entityPublicationUndiscoverable; + private Community community; + private Community communityRestricted; + private Collection collection; + private Collection collectionRestricted; @Before @Override @@ -52,8 +66,13 @@ public void setUp() throws Exception { context.turnOffAuthorisationSystem(); - Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); + community = CommunityBuilder.createCommunity(context).build(); + communityRestricted = CommunityBuilder.createCommunity(context).build(); + policyService.removeAllPolicies(context, communityRestricted); + collection = CollectionBuilder.createCollection(context, community).build(); + collectionRestricted = CollectionBuilder.createCollection(context, community).build(); + policyService.removeAllPolicies(context, collectionRestricted); + this.item1 = createItem(context, collection) .withTitle("Test 1") .withIssueDate("2010-10-17") @@ -62,6 +81,33 @@ public void setUp() throws Exception { .withTitle("Test 2") .withIssueDate("2015-8-3") .build(); + this.itemRestricted = createItem(context, collection) + .withTitle("Test 3") + .withIssueDate("2015-8-3") + .build(); + policyService.removeAllPolicies(context, itemRestricted); + 
this.itemUndiscoverable = createItem(context, collection) + .withTitle("Test 4") + .withIssueDate("2015-8-3") + .makeUnDiscoverable() + .build(); + this.entityPublication = createItem(context, collection) + .withTitle("Item Publication") + .withEntityType("Publication") + .withIssueDate("2015-8-3") + .build(); + this.entityPublicationRestricted = createItem(context, collection) + .withTitle("Item Publication Restricted") + .withEntityType("Publication") + .withIssueDate("2015-8-3") + .build(); + policyService.removeAllPolicies(context, entityPublicationRestricted); + this.entityPublicationUndiscoverable = createItem(context, collection) + .withTitle("Item Publication") + .withEntityType("Publication") + .withIssueDate("2015-8-3") + .makeUnDiscoverable() + .build(); runDSpaceScript("generate-sitemaps"); @@ -127,9 +173,39 @@ public void testSitemap_sitemap0Html() throws Exception { .andReturn(); String response = result.getResponse().getContentAsString(); + // contains a link to communities: [dspace.ui.url]/communities/ + assertTrue(response + .contains(configurationService.getProperty("dspace.ui.url") + "/communities/" + community.getID())); + // contains a link to collections: [dspace.ui.url]/collections/ + assertTrue(response + .contains(configurationService.getProperty("dspace.ui.url") + "/collections/" + collection.getID())); // contains a link to items: [dspace.ui.url]/items/ assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item1.getID())); assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item2.getID())); + // contains proper link to entities items + assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/entities/publication/" + + entityPublication.getID())); + assertFalse(response + .contains(configurationService.getProperty("dspace.ui.url") + "/items/" + entityPublication.getID())); + // does not contain links to restricted content + assertFalse(response.contains( + configurationService.getProperty("dspace.ui.url") + "/communities/" + communityRestricted.getID())); + assertFalse(response.contains( + configurationService.getProperty("dspace.ui.url") + "/collections/" + collectionRestricted.getID())); + assertFalse(response + .contains(configurationService.getProperty("dspace.ui.url") + "/items/" + itemRestricted.getID())); + assertFalse(response.contains(configurationService.getProperty("dspace.ui.url") + "/entities/publication/" + + entityPublicationRestricted.getID())); + assertFalse(response.contains( + configurationService.getProperty("dspace.ui.url") + "/items/" + entityPublicationRestricted.getID())); + // does not contain links to undiscoverable content + assertFalse(response + .contains(configurationService.getProperty("dspace.ui.url") + "/items/" + itemUndiscoverable.getID())); + assertFalse(response.contains(configurationService.getProperty("dspace.ui.url") + "/entities/publication/" + + entityPublicationUndiscoverable.getID())); + assertFalse(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + + entityPublicationUndiscoverable.getID())); + } @Test From f4f52157d9af5fe2efe7b8b52c8336adf4c9c2c0 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 7 Nov 2023 11:47:20 +0100 Subject: [PATCH 511/686] [DSC-1229] Fixes BitstreamRestRepositoryIT --- .../java/org/dspace/app/rest/BitstreamRestRepositoryIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index ac89b90dfbbe..bf53ababe2fd 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -1338,7 +1338,7 @@ public void patchReplaceMultipleDescriptionBitstream() throws Exception { bitstream = BitstreamBuilder. createBitstream(context, publicItem1, is) .withName("Bitstream") - .withMimeType("text/plain") + //.withMimeType("text/plain") .build(); } From 131bd782826629d6fb96e42712e8ad48bca2dfc8 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 7 Nov 2023 12:18:45 +0100 Subject: [PATCH 512/686] [DSC-1229] Fixes DiscoveryRelatedBoxComponentIT --- .../rest/DiscoveryRelatedBoxComponentIT.java | 46 +++++++++---------- 1 file changed, 21 insertions(+), 25 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java index 5f27cf57b05c..9d9323483549 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java @@ -243,13 +243,11 @@ public void discoverSearchObjectsTestWithScope() throws Exception { .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), -// FacetEntryMatcher.typeFacet(false), FacetEntryMatcher.authorFacet(false), FacetEntryMatcher.anyFacet("editor", "text"), FacetEntryMatcher.anyFacet("organization", "text"), FacetEntryMatcher.anyFacet("funding", "text"), - FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), -// FacetEntryMatcher.anyFacet("subject", "hierarchical"), + FacetEntryMatcher.anyFacet("itemtype", "text"), FacetEntryMatcher.subjectFacet(false), FacetEntryMatcher.dateIssuedFacet(false), FacetEntryMatcher.hasContentInOriginalBundleFacet(false) @@ -283,17 +281,16 @@ public void discoverSearchObjectsTestWithScope() throws Exception { //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), -// FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.anyFacet("editor", "text"), - FacetEntryMatcher.anyFacet("organization", "text"), - FacetEntryMatcher.anyFacet("funding", "text"), - FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.anyFacet("editor", "text"), + FacetEntryMatcher.anyFacet("organization", "text"), + FacetEntryMatcher.anyFacet("funding", "text"), + FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.subjectFacet(false), + 
FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) ))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) @@ -326,17 +323,16 @@ public void discoverSearchObjectsTestWithScope() throws Exception { //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), - FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), -// FacetEntryMatcher.typeFacet(false), - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.anyFacet("editor", "text"), - FacetEntryMatcher.anyFacet("organization", "text"), - FacetEntryMatcher.anyFacet("funding", "text"), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.anyFacet("editor", "text"), + FacetEntryMatcher.anyFacet("organization", "text"), + FacetEntryMatcher.anyFacet("funding", "text"), + FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) ))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) From 3de69e4ed20692e17d0b814a95c6e678fb73acd1 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 7 Nov 2023 12:50:13 +0100 Subject: [PATCH 513/686] [DSC-1229] Fixes DiscoveryRestControllerMultilanguageIT --- .../config/spring/api/test-discovery.xml | 187 ++++++++++-------- 1 file changed, 109 insertions(+), 78 deletions(-) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml index 60642118bb39..d38f6cb399a5 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -31,7 +31,38 @@ + + + + + + + + + + + crisrp.name + crisrp.name.variant + crisrp.name.translated + person.givenName + person.familyName + + + + + + + + + + + dc.contributor.author + dc.contributor.editor + + + + @@ -41,9 +72,6 @@ - - - @@ -180,23 +208,30 @@ - - - + + + + + + + - - + + + + + @@ -210,54 +245,51 @@ + - + - - - + + + + + + + queries done by discovery for this configuration--> - (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community + + + search.resourcetype:Item -withdrawn:true AND -discoverable:false - + - - + - - - - - - - - - - @@ -275,60 +307,35 @@ - - - - - - - - - - - - - - - - - - - - - dc.title - dc.contributor.author - dc.creator - dc.subject + may appear in search results when the Item is public. 
See DS-3498 + + + + + + + + + + + + + + --> - - - - - - + + + + + @@ -3629,12 +3636,6 @@ - - - - - - @@ -3739,6 +3740,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From cbb72b1cc38485d8bf47a1f133b880d9f3dcc45c Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 7 Nov 2023 13:03:32 +0100 Subject: [PATCH 514/686] [DSC-1229] Fixes DiscoveryScopeBasedRestControllerIT --- .../config/spring/api/test-discovery.xml | 980 ++++++++++++++++++ .../DiscoveryScopeBasedRestControllerIT.java | 34 +- 2 files changed, 1003 insertions(+), 11 deletions(-) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml index d38f6cb399a5..a9f349c55aba 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -164,6 +164,15 @@ + + + + + + + + + @@ -208,6 +217,977 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + dc.test.parentcommunity1field + + + + + + + + + + + + + + + dc.test.subcommunity11field + + + + + + + + + + + + + + + dc.test.collection111field + + + + + + + + + + + + + + + dc.test.collection121field + + + + + + + + + + + + + + + dc.test.subcommunity21field + + + + + + + + + + + + + + dc.test.collection211field + + + + + + + + + + + + + + dc.test.collection221field + + + + + + + + + + diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java index a3408a7736df..90bd209a4598 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -502,11 +502,15 @@ public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { .andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.entityTypeFacet(false) + FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.anyFacet("language", "text") )) ); } @@ -614,11 +618,15 @@ public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { .andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.entityTypeFacet(false) + FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.anyFacet("language", "text") )) ); } @@ -664,11 +672,15 @@ public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { .andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), + FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), FacetEntryMatcher.authorFacet(false), FacetEntryMatcher.subjectFacet(false), FacetEntryMatcher.dateIssuedFacet(false), FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.entityTypeFacet(false) + FacetEntryMatcher.entityTypeFacet(false), + 
FacetEntryMatcher.anyFacet("language", "text") )) ); } From b95453d2ddec03e7efc8c132fe4daef6f8030764 Mon Sep 17 00:00:00 2001 From: mohamed eskander Date: Tue, 7 Nov 2023 16:00:19 +0200 Subject: [PATCH 515/686] [DSC-1315] fixed check style --- .../org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java | 1 - 1 file changed, 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java index bd4cddcdf260..01ad76300ce0 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java @@ -18,7 +18,6 @@ import javax.annotation.PostConstruct; import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang3.StringUtils; import org.dspace.app.util.SubmissionConfigReader; import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.authorize.AuthorizeException; From 8c422681d0c384ff324328b33d7f60fcf62321fa Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 7 Nov 2023 15:57:20 +0100 Subject: [PATCH 516/686] [DSC-1229] Fixes StatisticsRestRepositoryIT --- .../data/dspaceFolder/config/spring/api/test-discovery.xml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml index a9f349c55aba..de974d70ec74 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -3946,8 +3946,10 @@ - + + + dc.type From d62aaffa17668cbcb384bba66828186cc9d4c7f8 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 7 Nov 2023 16:08:46 +0100 Subject: [PATCH 517/686] [DSC-1229] Fixes BulkAccessControlScriptIT --- .../src/test/data/dspaceFolder/config/spring/api/scripts.xml | 5 ----- .../test/data/dspaceFolder/config/spring/rest/scripts.xml | 5 +++++ 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/scripts.xml index cee2bf0f0629..6facf51941df 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/scripts.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/scripts.xml @@ -32,9 +32,4 @@ - - - - - diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml index b40cfa1704a5..1f668b9e1616 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml @@ -63,4 +63,9 @@ + + + + + From 0e76ff559e269d2ec4f5ee5e320697d8219dfc39 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 09:48:50 +0100 Subject: [PATCH 518/686] [DSC-1229] Fixes BrowsesResourceControllerIT --- .../dspaceFolder/config/submission-forms.xml | 14 +- .../rest/converter/BrowseIndexConverter.java | 25 +- .../app/rest/BrowsesResourceControllerIT.java | 519 +----------------- .../app/rest/matcher/BrowseIndexMatcher.java | 27 +- 4 files changed, 60 insertions(+), 525 deletions(-) diff --git 
a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index fa953e4dfddd..8f95de44ed85 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -1765,13 +1765,15 @@ You can leave out the day and/or month if they aren't applicable. dc - type - - onebox - false + subject + + + true + + twobox + Enter appropriate subject keywords or phrases. + srsc - - Select a subject from the vocabulary. diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java index 2595968d4d02..3f6fbbee4c6e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java @@ -13,11 +13,14 @@ import java.util.ArrayList; import java.util.List; +import java.util.Optional; import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.projection.Projection; import org.dspace.browse.BrowseIndex; +import org.dspace.content.authority.DSpaceControlledVocabulary; import org.dspace.content.authority.DSpaceControlledVocabularyIndex; +import org.dspace.discovery.configuration.DiscoverySearchFilter; import org.dspace.sort.SortException; import org.dspace.sort.SortOption; import org.springframework.stereotype.Component; @@ -37,7 +40,7 @@ public BrowseIndexRest convert(BrowseIndex obj, Projection projection) { bir.setProjection(projection); List metadataList = new ArrayList(); String id = obj.getName(); - if (obj instanceof DSpaceControlledVocabularyIndex) { + if (isValidControlledVocabularyIndex(obj)) { DSpaceControlledVocabularyIndex vocObj = (DSpaceControlledVocabularyIndex) obj; metadataList = new ArrayList<>(vocObj.getMetadataFields()); id = vocObj.getVocabulary().getPluginInstanceName(); @@ -74,6 +77,26 @@ public BrowseIndexRest convert(BrowseIndex obj, Projection projection) { return bir; } + private static boolean isValidControlledVocabularyIndex(BrowseIndex obj) { + return obj instanceof DSpaceControlledVocabularyIndex && + hasIndexFieldName((DSpaceControlledVocabularyIndex) obj) && + hasIndexPluginInstanceName((DSpaceControlledVocabularyIndex) obj); + } + + private static boolean hasIndexFieldName(DSpaceControlledVocabularyIndex obj) { + return Optional.ofNullable(obj) + .map(DSpaceControlledVocabularyIndex::getFacetConfig) + .map(DiscoverySearchFilter::getIndexFieldName) + .isPresent(); + } + + private static boolean hasIndexPluginInstanceName(DSpaceControlledVocabularyIndex obj) { + return Optional.ofNullable(obj) + .map(DSpaceControlledVocabularyIndex::getVocabulary) + .map(DSpaceControlledVocabulary::getPluginInstanceName) + .isPresent(); + } + @Override public Class getModelClass() { return BrowseIndex.class; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index a5c90b11e80a..d6070307996b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -67,13 +67,12 @@ public void findAll() throws Exception { //Our default Discovery config has 5 browse indexes, so we expect this to be 
reflected in the page // object .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", is(5))) - .andExpect(jsonPath("$.page.totalElements", is(11))) + .andExpect(jsonPath("$.page.totalElements", is(14))) .andExpect(jsonPath("$.page.totalPages", is(1))) .andExpect(jsonPath("$.page.number", is(0))) //The array of browse index should have a size 4 - .andExpect(jsonPath("$._embedded.browses", hasSize(11))) + .andExpect(jsonPath("$._embedded.browses", hasSize(14))) //Check that all (and only) the default browse indexes are present .andExpect(jsonPath("$._embedded.browses", containsInAnyOrder( @@ -83,11 +82,18 @@ public void findAll() throws Exception { BrowseIndexMatcher.subjectBrowseIndex("asc"), BrowseIndexMatcher.rodeptBrowseIndex("asc"), BrowseIndexMatcher.typeBrowseIndex("asc"), + BrowseIndexMatcher.rpdeptBrowseIndex("asc"), BrowseIndexMatcher.rpnameBrowseIndex("asc"), BrowseIndexMatcher.ounameBrowseIndex("asc"), + BrowseIndexMatcher.eqtitleBrowseIndex("asc"), + BrowseIndexMatcher.typesBrowseIndex(), BrowseIndexMatcher.pjtitleBrowseIndex("asc"), - BrowseIndexMatcher.rpdeptBrowseIndex("asc"), - BrowseIndexMatcher.eqtitleBrowseIndex("asc") + BrowseIndexMatcher.hierarchicalBrowseIndex( + "publication-coar-types", "itemtype", "dc.type" + ), + BrowseIndexMatcher.hierarchicalBrowseIndex( + "srsc", "subject", "dc.subject" + ) ))) ; } @@ -143,9 +149,8 @@ public void findBrowseByVocabulary() throws Exception { .andExpect(status().isOk()) //We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) - //Check that the JSON root matches the expected browse index - .andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc"))) + .andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc", "subject", "dc.subject"))) ; } @@ -507,251 +512,6 @@ public void findBrowseBySubjectItems() throws Exception { .andExpect(jsonPath("$.page.size", is(20))); } - @Test - public void findBrowseBySubjectItemsWithScope() throws Exception { - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and two collections. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); - - //2. 
Two public items with the same subject and another public item that contains that same subject, but also - // another one - // All of the items are readable by an Anonymous user - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("zPublic item more") - .withIssueDate("2017-10-17") - .withAuthor("Smith, Donald").withAuthor("Doe, John") - .withSubject("ExtraEntry").withSubject("AnotherTest") - .build(); - - Item publicItem2 = ItemBuilder.createItem(context, col2) - .withTitle("Public item 2") - .withIssueDate("2016-02-13") - .withAuthor("Smith, Maria").withAuthor("Doe, Jane") - .withSubject("AnotherTest") - .build(); - - Item publicItem3 = ItemBuilder.createItem(context, col2) - .withTitle("Public item 3") - .withIssueDate("2016-02-14") - .withAuthor("Smith, Maria").withAuthor("Doe, Jane") - .withSubject("AnotherTest") - .build(); - - Item withdrawnItem1 = ItemBuilder.createItem(context, col2) - .withTitle("Withdrawn item 1") - .withIssueDate("2016-02-13") - .withAuthor("Smith, Maria").withAuthor("Doe, Jane") - .withSubject("AnotherTest").withSubject("TestingForMore") - .withSubject("ExtraEntry").withSubject("WithdrawnEntry") - .withdrawn() - .build(); - Item privateItem1 = ItemBuilder.createItem(context, col2) - .withTitle("Private item 1") - .withIssueDate("2016-02-13") - .withAuthor("Smith, Maria").withAuthor("Doe, Jane") - .withSubject("AnotherTest").withSubject("TestingForMore") - .withSubject("ExtraEntry").withSubject("PrivateEntry") - .makeUnDiscoverable() - .build(); - - context.restoreAuthSystemState(); - - //** WHEN ** - //An anonymous user browses the items that correspond with the ExtraEntry subject query - getClient().perform(get("/api/discover/browses/subject/items") - .param("scope", String.valueOf(col2.getID())) - .param("filterValue", "ExtraEntry")) - //** THEN ** - //The status has to be 200 - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - //We expect there to be no elements in collection 2 - .andExpect(jsonPath("$.page.totalElements", is(0))) - .andExpect(jsonPath("$.page.size", is(20))); - - //** WHEN ** - //An anonymous user browses the items that correspond with the AnotherTest subject query - getClient().perform(get("/api/discover/browses/subject/items") - .param("scope", String.valueOf(col2.getID())) - .param("filterValue", "AnotherTest")) - //** THEN ** - //The status has to be 200 - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - //We expect there to be only two elements, the ones that we've added with the requested subject - // in collection 2 - .andExpect(jsonPath("$.page.totalElements", is(2))) - .andExpect(jsonPath("$.page.size", is(20))) - //Verify that the title of the public and embargoed items are present and sorted descending - .andExpect(jsonPath("$._embedded.items", contains( - ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2, "Public item 2", "2016-02-13"), - ItemMatcher.matchItemWithTitleAndDateIssued(publicItem3, "Public item 3", "2016-02-14") - ))); - - //** WHEN ** - //An anonymous user browses the items that correspond with the PrivateEntry subject query - getClient().perform(get("/api/discover/browses/subject/items") - .param("scope", String.valueOf(col2.getID())) - .param("filterValue", "PrivateEntry")) - //** THEN ** - //The status has to be 200 - .andExpect(status().isOk()) - //We expect the content type to be 
"application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - //We expect there to be no elements because the item is private - .andExpect(jsonPath("$.page.totalElements", is(0))) - .andExpect(jsonPath("$.page.size", is(20))); - - //** WHEN ** - //An anonymous user browses the items that correspond with the WithdrawnEntry subject query - getClient().perform(get("/api/discover/browses/subject/items") - .param("scope", String.valueOf(col2.getID())) - .param("filterValue", "WithdrawnEntry")) - //** THEN ** - //The status has to be 200 - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - //We expect there to be no elements because the item is withdrawn - .andExpect(jsonPath("$.page.totalElements", is(0))) - .andExpect(jsonPath("$.page.size", is(20))); - } - - @Test - public void findBrowseBySubjectItemsWithScopeAsAdmin() throws Exception { - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and two collections. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); - - //2. Two public items with the same subject and another public item that contains that same subject, but also - // another one - // All of the items are readable by an Anonymous user - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("zPublic item more") - .withIssueDate("2017-10-17") - .withAuthor("Smith, Donald").withAuthor("Doe, John") - .withSubject("ExtraEntry").withSubject("AnotherTest") - .build(); - - Item publicItem2 = ItemBuilder.createItem(context, col2) - .withTitle("Public item 2") - .withIssueDate("2016-02-13") - .withAuthor("Smith, Maria").withAuthor("Doe, Jane") - .withSubject("AnotherTest") - .build(); - - Item publicItem3 = ItemBuilder.createItem(context, col2) - .withTitle("Public item 3") - .withIssueDate("2016-02-14") - .withAuthor("Smith, Maria").withAuthor("Doe, Jane") - .withSubject("AnotherTest") - .build(); - - Item withdrawnItem1 = ItemBuilder.createItem(context, col2) - .withTitle("Withdrawn item 1") - .withIssueDate("2016-02-13") - .withAuthor("Smith, Maria").withAuthor("Doe, Jane") - .withSubject("AnotherTest").withSubject("TestingForMore") - .withSubject("ExtraEntry").withSubject("WithdrawnEntry") - .withdrawn() - .build(); - Item privateItem1 = ItemBuilder.createItem(context, col2) - .withTitle("Private item 1") - .withIssueDate("2016-02-13") - .withAuthor("Smith, Maria").withAuthor("Doe, Jane") - .withSubject("AnotherTest").withSubject("TestingForMore") - .withSubject("ExtraEntry").withSubject("PrivateEntry") - .makeUnDiscoverable() - .build(); - - context.restoreAuthSystemState(); - - String adminToken = getAuthToken(admin.getEmail(), password); - - - //** WHEN ** - //An admin user browses the items that correspond with the ExtraEntry subject query - getClient(adminToken).perform(get("/api/discover/browses/subject/items") - .param("scope", String.valueOf(col2.getID())) - .param("filterValue", "ExtraEntry")) - //** THEN ** - //The status has to be 200 - .andExpect(status().isOk()) 
- //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - //We expect there to be no elements in collection 2 - .andExpect(jsonPath("$.page.totalElements", is(0))) - .andExpect(jsonPath("$.page.size", is(20))); - - //** WHEN ** - //An admin user browses the items that correspond with the AnotherTest subject query - getClient(adminToken).perform(get("/api/discover/browses/subject/items") - .param("scope", String.valueOf(col2.getID())) - .param("filterValue", "AnotherTest")) - //** THEN ** - //The status has to be 200 - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - //We expect there to be only two elements, the ones that we've added with the requested subject - // in collection 2 - .andExpect(jsonPath("$.page.totalElements", is(2))) - .andExpect(jsonPath("$.page.size", is(20))) - //Verify that the title of the public and embargoed items are present and sorted descending - .andExpect(jsonPath("$._embedded.items", contains( - ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2, "Public item 2", "2016-02-13"), - ItemMatcher.matchItemWithTitleAndDateIssued(publicItem3, "Public item 3", "2016-02-14") - ))); - - //** WHEN ** - //An admin user browses the items that correspond with the PrivateEntry subject query - getClient(adminToken).perform(get("/api/discover/browses/subject/items") - .param("scope", String.valueOf(col2.getID())) - .param("filterValue", "PrivateEntry")) - //** THEN ** - //The status has to be 200 - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - //We expect there to be no elements because the item is private - .andExpect(jsonPath("$.page.totalElements", is(0))) - .andExpect(jsonPath("$.page.size", is(20))); - - //** WHEN ** - //An admin user browses the items that correspond with the WithdrawnEntry subject query - getClient(adminToken).perform(get("/api/discover/browses/subject/items") - .param("scope", String.valueOf(col2.getID())) - .param("filterValue", "WithdrawnEntry")) - //** THEN ** - //The status has to be 200 - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - //We expect there to be no elements because the item is withdrawn - .andExpect(jsonPath("$.page.totalElements", is(0))) - .andExpect(jsonPath("$.page.size", is(20))); - } - @Test public void findBrowseByTitleItems() throws Exception { context.turnOffAuthorisationSystem(); @@ -889,135 +649,6 @@ public void findBrowseByTitleItems() throws Exception { not(matchMetadata("dc.title", "Internal publication"))))); } - @Test - public void findBrowseByTitleItemsWithScope() throws Exception { - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and two collections. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); - - //2. 
Two public items that are readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Public item 1") - .withIssueDate("2017-10-17") - .withAuthor("Smith, Donald").withAuthor("Doe, John") - .withSubject("Java").withSubject("Unit Testing") - .build(); - - Item publicItem2 = ItemBuilder.createItem(context, col2) - .withTitle("Public item 2") - .withIssueDate("2016-02-13") - .withAuthor("Smith, Maria").withAuthor("Doe, Jane") - .withSubject("Angular").withSubject("Unit Testing") - .build(); - - //3. An item that has been made private - Item privateItem = ItemBuilder.createItem(context, col2) - .withTitle("This is a private item") - .withIssueDate("2015-03-12") - .withAuthor("Duck, Donald") - .withSubject("Cartoons").withSubject("Ducks") - .makeUnDiscoverable() - .build(); - - //4. An item with an item-level embargo - Item embargoedItem = ItemBuilder.createItem(context, col2) - .withTitle("An embargoed publication") - .withIssueDate("2017-08-10") - .withAuthor("Mouse, Mickey") - .withSubject("Cartoons").withSubject("Mice") - .withEmbargoPeriod("12 months") - .build(); - - //5. An item that is only readable for an internal groups - Group internalGroup = GroupBuilder.createGroup(context) - .withName("Internal Group") - .build(); - - Item internalItem = ItemBuilder.createItem(context, col2) - .withTitle("Internal publication") - .withIssueDate("2016-09-19") - .withAuthor("Doe, John") - .withSubject("Unknown") - .withReaderGroup(internalGroup) - .build(); - - context.restoreAuthSystemState(); - - //** WHEN ** - //An anonymous user browses the items in the Browse by item endpoint - //sorted descending by tile - getClient().perform(get("/api/discover/browses/title/items") - .param("scope", String.valueOf(col2.getID())) - .param("sort", "title,desc")) - - //** THEN ** - //The status has to be 200 OK - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - - .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", is(1))) - .andExpect(jsonPath("$.page.totalPages", is(1))) - .andExpect(jsonPath("$.page.number", is(0))) - - .andExpect(jsonPath("$._embedded.items", - contains(ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2, - "Public item 2", - "2016-02-13")))) - - //The private and internal items must not be present - .andExpect(jsonPath("$._embedded.items[*].metadata", Matchers.allOf( - not(matchMetadata("dc.title", "This is a private item")), - not(matchMetadata("dc.title", "Internal publication"))))); - - String adminToken = getAuthToken(admin.getEmail(), password); - //** WHEN ** - //An admin user browses the items in the Browse by item endpoint - //sorted descending by tile - getClient(adminToken).perform(get("/api/discover/browses/title/items") - .param("scope", String.valueOf(col2.getID())) - .param("sort", "title,desc")) - - //** THEN ** - //The status has to be 200 OK - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - - .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", is(3))) - .andExpect(jsonPath("$.page.totalPages", is(1))) - .andExpect(jsonPath("$.page.number", is(0))) - .andExpect(jsonPath("$._embedded.items", contains( - ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2, - "Public item 2", - "2016-02-13"), - ItemMatcher.matchItemWithTitleAndDateIssued(internalItem, - 
"Internal publication", - "2016-09-19"), - ItemMatcher.matchItemWithTitleAndDateIssued(embargoedItem, - "An embargoed publication", - "2017-08-10") - - ))) - - - //The private and internal items must not be present - .andExpect(jsonPath("$._embedded.items[*].metadata", Matchers.allOf( - not(matchMetadata("dc.title", "This is a private item")) - ))); - } - @Test /** * This test was introduced to reproduce the bug DS-4269 Pagination links must be consistent also when there is not @@ -1243,132 +874,6 @@ public void testPaginationBrowseByDateIssuedItems() throws Exception { ))); } - @Test - public void testPaginationBrowseByDateIssuedItemsWithScope() throws Exception { - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and two collections. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); - - //2. 7 public items that are readable by Anonymous - Item item1 = ItemBuilder.createItem(context, col1) - .withTitle("Item 1") - .withIssueDate("2017-10-17") - .build(); - - Item item2 = ItemBuilder.createItem(context, col2) - .withTitle("Item 2") - .withIssueDate("2016-02-13") - .build(); - - Item item3 = ItemBuilder.createItem(context, col1) - .withTitle("Item 3") - .withIssueDate("2016-02-12") - .build(); - - Item item4 = ItemBuilder.createItem(context, col2) - .withTitle("Item 4") - .withIssueDate("2016-02-11") - .build(); - - Item item5 = ItemBuilder.createItem(context, col1) - .withTitle("Item 5") - .withIssueDate("2016-02-10") - .build(); - - Item item6 = ItemBuilder.createItem(context, col2) - .withTitle("Item 6") - .withIssueDate("2016-01-13") - .build(); - - Item item7 = ItemBuilder.createItem(context, col1) - .withTitle("Item 7") - .withIssueDate("2016-01-12") - .build(); - - Item withdrawnItem1 = ItemBuilder.createItem(context, col2) - .withTitle("Withdrawn item 1") - .withIssueDate("2016-02-13") - .withdrawn() - .build(); - - Item privateItem1 = ItemBuilder.createItem(context, col2) - .withTitle("Private item 1") - .makeUnDiscoverable() - .build(); - - - context.restoreAuthSystemState(); - - //** WHEN ** - //An anonymous user browses the items in the Browse by date issued endpoint - //sorted ascending by tile with a page size of 5 - getClient().perform(get("/api/discover/browses/dateissued/items") - .param("scope", String.valueOf(col2.getID())) - .param("sort", "title,asc") - .param("size", "5")) - - //** THEN ** - //The status has to be 200 OK - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - - //We expect only the first five items to be present - .andExpect(jsonPath("$.page.size", is(5))) - .andExpect(jsonPath("$.page.totalElements", is(3))) - .andExpect(jsonPath("$.page.totalPages", is(1))) - .andExpect(jsonPath("$.page.number", is(0))) - - //Verify that the title and date of the items match and that they are sorted ascending - .andExpect(jsonPath("$._embedded.items", - contains( - ItemMatcher.matchItemWithTitleAndDateIssued(item2, - "Item 2", "2016-02-13"), - ItemMatcher.matchItemWithTitleAndDateIssued(item4, - 
"Item 4", "2016-02-11"), - ItemMatcher.matchItemWithTitleAndDateIssued(item6, - "Item 6", "2016-01-13") - ))); - - String adminToken = getAuthToken(admin.getEmail(), password); - getClient(adminToken).perform(get("/api/discover/browses/dateissued/items") - .param("scope", String.valueOf(col2.getID())) - .param("sort", "title,asc") - .param("size", "5")) - - //** THEN ** - //The status has to be 200 OK - .andExpect(status().isOk()) - //We expect the content type to be "application/hal+json;charset=UTF-8" - .andExpect(content().contentType(contentType)) - - //We expect only the first five items to be present - .andExpect(jsonPath("$.page.size", is(5))) - .andExpect(jsonPath("$.page.totalElements", is(3))) - .andExpect(jsonPath("$.page.totalPages", is(1))) - .andExpect(jsonPath("$.page.number", is(0))) - - //Verify that the title and date of the items match and that they are sorted ascending - .andExpect(jsonPath("$._embedded.items", - contains( - ItemMatcher.matchItemWithTitleAndDateIssued(item2, - "Item 2", "2016-02-13"), - ItemMatcher.matchItemWithTitleAndDateIssued(item4, - "Item 4", "2016-02-11"), - ItemMatcher.matchItemWithTitleAndDateIssued(item6, - "Item 6", "2016-01-13") - ))); - - } @Test public void testBrowseByEntriesStartsWith() throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java index 3fec8e4158dd..c6ec8db0387b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java @@ -19,7 +19,6 @@ import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase; import org.hamcrest.Matcher; -import org.hamcrest.Matchers; /** * Utility class to construct a Matcher for a browse index @@ -88,7 +87,6 @@ public static Matcher dateIssuedBrowseIndex(final String order) public static Matcher rodeptBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("cris.virtual.department")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/rodept")), @@ -99,7 +97,6 @@ public static Matcher rodeptBrowseIndex(final String order) { public static Matcher typeBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.type")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/type")), @@ -110,7 +107,6 @@ public static Matcher typeBrowseIndex(final String order) { public static Matcher rpnameBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/rpname")), @@ -121,7 +117,6 @@ public static Matcher rpnameBrowseIndex(final String order) { public static Matcher rpdeptBrowseIndex(final String order) { return 
allOf( hasJsonPath("$.metadata", contains("person.affiliation.name")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/rpdept")), @@ -132,7 +127,6 @@ public static Matcher rpdeptBrowseIndex(final String order) { public static Matcher ounameBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/ouname")), @@ -143,7 +137,6 @@ public static Matcher ounameBrowseIndex(final String order) { public static Matcher pjtitleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/pjtitle")), @@ -154,20 +147,32 @@ public static Matcher pjtitleBrowseIndex(final String order) { public static Matcher eqtitleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/eqtitle")), hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/eqtitle/items")) + ); + } + + public static Matcher typesBrowseIndex() { + return allOf( + hasJsonPath("$.metadata", contains("dc.type")), + hasJsonPath("$.browseType", is("hierarchicalBrowse")), + hasJsonPath("$.facetType", is("itemtype")), + hasJsonPath("$.type", is("browse")), + hasJsonPath("$._links.self.href", is(REST_SERVER_URL + "discover/browses/types")), + hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/types/items")) ); } - public static Matcher hierarchicalBrowseIndex(final String vocabulary) { + public static Matcher hierarchicalBrowseIndex( + String vocabulary, String facetType, String metadata + ) { return allOf( - hasJsonPath("$.metadata", contains("dc.subject")), + hasJsonPath("$.metadata", contains(metadata)), hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)), hasJsonPath("$.type", equalToIgnoringCase("browse")), - hasJsonPath("$.facetType", equalToIgnoringCase("subject")), + hasJsonPath("$.facetType", equalToIgnoringCase(facetType)), hasJsonPath("$.vocabulary", equalToIgnoringCase(vocabulary)), hasJsonPath("$._links.vocabulary.href", is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))), From 59b6cc425346ad29bd34ce9c5048cc5a063109c9 Mon Sep 17 00:00:00 2001 From: mohamed eskander Date: Wed, 8 Nov 2023 11:05:05 +0200 Subject: [PATCH 519/686] [DSC-1315] fixed broken test --- .../layout/service/impl/CrisLayoutTabServiceImpl.java | 4 ++++ .../service/impl/CrisLayoutTabServiceImplTest.java | 9 ++++++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git 
a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java index 01ad76300ce0..9edb707134f3 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java @@ -218,6 +218,10 @@ public List findByItem(Context context, String itemUuid) throws S } private String getSubmissionDefinitionName(Item item) { + if (submissionConfigReader == null) { + return ""; + } + return submissionConfigReader.getSubmissionConfigByCollection(item.getOwningCollection()).getSubmissionName(); } diff --git a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImplTest.java b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImplTest.java index 7260f43849b5..3e439c98010c 100644 --- a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImplTest.java @@ -32,6 +32,7 @@ import org.dspace.layout.CrisLayoutRow; import org.dspace.layout.CrisLayoutTab; import org.dspace.layout.dao.CrisLayoutTabDAO; +import org.dspace.services.ConfigurationService; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; @@ -55,6 +56,8 @@ public class CrisLayoutTabServiceImplTest { private AuthorizeService authorizeService; @Mock private ItemService itemService; + @Mock + private ConfigurationService configurationService; @InjectMocks private CrisLayoutTabServiceImpl crisLayoutTabService; @@ -97,7 +100,7 @@ public void allTabsAreReturned() throws SQLException { when(itemService.getMetadata(item, "dspace.entity.type")) .thenReturn(entityType); - when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType)) + when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null)) .thenReturn(Arrays.asList(tabOne, tabTwo, tabThree, tabWithoutBoxes, tabWithOnlyForbiddenBoxes)); List tabs = crisLayoutTabService.findByItem(context, itemUuid); @@ -120,7 +123,7 @@ public void noTabsFoundForEntityType() throws SQLException { when(itemService.getMetadata(item, "dspace.entity.type")) .thenReturn(entityType); - when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType)).thenReturn(emptyList()); + when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null)).thenReturn(emptyList()); List tabs = crisLayoutTabService.findByItem(context, itemUuid); @@ -140,7 +143,7 @@ public void nullTabsFoundForEntityType() throws SQLException { when(itemService.getMetadata(item, "dspace.entity.type")) .thenReturn(entityType); - when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType)).thenReturn(List.of()); + when(tabDao.findByEntityTypeAndEagerlyFetchBoxes(context, entityType, null)).thenReturn(List.of()); List tabs = crisLayoutTabService.findByItem(context, itemUuid); From a62b532c8094f7edece4671f887508d302229459 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 11:08:54 +0100 Subject: [PATCH 520/686] [DSC-1229] Fixes DiscoveryRestControllerIT --- .../config/spring/api/test-discovery.xml | 102 ++++++++++++++++++ 1 file changed, 102 insertions(+) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml index de974d70ec74..95515815ef68 100644 
--- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml
[hunks @@ -141,6 +141,8 @@, @@ -1188,6 +1190,18 @@, @@ -3523,6 +3537,79 @@ and @@ -4752,6 +4839,21 @@ (XML markup not preserved): added discovery configuration whose surviving text contains the filter queries "search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community", "search.resourcetype:Item AND (entityType_keyword:Publication OR entityType_keyword:Patent OR entityType_keyword:Product)" and "-withdrawn:true AND -discoverable:false"]
From 9d512eba837acbc3b3abc70dad4215be0faf74c8 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 12:40:31 +0100 Subject: [PATCH 521/686] [DSC-1229] Fixes PubmedImportMetadataSourceServiceIT --- .../PubmedDateMetadatumContributor.java | 14 ++++++---- .../PubmedImportMetadataSourceServiceIT.java | 28 +++++++++++++++++-- 2 files changed, 35 insertions(+), 7 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java index add9caef1b74..17816eed9744 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java
@@ -14,13 +14,17 @@ import java.util.Date; import java.util.LinkedList; import java.util.List; +import java.util.Locale; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.dspace.core.I18nUtil; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.springframework.beans.factory.annotation.Autowired; /**
@@ -99,7 +103,7 @@ public PubmedDateMetadatumContributor(MetadataFieldConfig field, MetadataContrib @Override public Collection contributeMetadata(T t) { List values = new LinkedList<>(); - + final Locale defaultLocale = I18nUtil.getDefaultLocale(); try { LinkedList yearList = (LinkedList) year.contributeMetadata(t);
@@ -114,13 +118,13 @@ public Collection contributeMetadata(T t) { if (monthList.size() > i && dayList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + "-" + dayList.get(i).getValue(); - resultFormatter = new SimpleDateFormat("yyyy-MM-dd"); + resultFormatter = new SimpleDateFormat("yyyy-MM-dd", defaultLocale); } else if (monthList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); - resultFormatter = new SimpleDateFormat("yyyy-MM"); + resultFormatter = new SimpleDateFormat("yyyy-MM", defaultLocale); } else { dateString = yearList.get(i).getValue(); - resultFormatter = new SimpleDateFormat("yyyy"); + resultFormatter = new SimpleDateFormat("yyyy", defaultLocale); } int j = 0;
@@ -128,7 +132,7 @@ public
Collection contributeMetadata(T t) { while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) { String dateFormat = dateFormatsToAttempt.get(j); try { - SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); + SimpleDateFormat formatter = new SimpleDateFormat(dateFormat, defaultLocale); Date date = formatter.parse(dateString); resultDateString = resultFormatter.format(date); } catch (ParseException e) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java index 3b39d251216c..ffbd77db6a61 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -30,6 +30,8 @@ /** * Integration tests for {@link PubmedImportMetadataSourceServiceImpl} + *
    + * Metadata configuration in {@code pubmed-integration.xml} * * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) */ @@ -130,7 +132,7 @@ private ArrayList getRecords() { + " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to" + " acquire an adequate level of judgment and critical thinking. Therefore, it will be" + " necessary to relate teaching methodologies with the skills developed."); - MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "36708638"); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "pmid", "36708638"); MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia"); MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana"); MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola"); @@ -145,6 +147,12 @@ private ArrayList getRecords() { MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies"); MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education"); MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology"); + MetadatumDTO relationIsPartOf = createMetadatumDTO("dc", "relation", "ispartof", "Nurse education in practice"); + MetadatumDTO relationIssn = createMetadatumDTO("dc", "relation", "issn", "1873-5223"); + MetadatumDTO relationCitVolume = createMetadatumDTO("oaire", "citation", "volume", "67"); + MetadatumDTO doiIdentifier = createMetadatumDTO("dc", "identifier", "doi", "10.1016/j.nepr.2023.103548"); + MetadatumDTO dcType = createMetadatumDTO("dc", "type", null, "text::journal::journal article"); + MetadatumDTO dcType2 = createMetadatumDTO("dc", "type", null, "text::review"); metadatums.add(title); metadatums.add(description1); @@ -168,6 +176,12 @@ private ArrayList getRecords() { metadatums.add(subject4); metadatums.add(subject5); metadatums.add(subject6); + metadatums.add(relationIsPartOf); + metadatums.add(relationIssn); + metadatums.add(relationCitVolume); + metadatums.add(doiIdentifier); + metadatums.add(dcType); + metadatums.add(dcType2); ImportRecord record = new ImportRecord(metadatums); records.add(record); @@ -191,11 +205,16 @@ private ArrayList getRecords2() { + " built-in features of the Entrez system, and providing alternative ways to issue the initial query." + " The support protocol reviews how to save frequently issued queries. 
Finally, Cn3D, a structure" + " visualization tool, is also discussed."); - MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "21975942"); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "pmid", "21975942"); MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen"); MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D"); MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10"); MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + MetadatumDTO relationIsPartOf = createMetadatumDTO("dc", "relation", "ispartof", "Current protocols in human genetics"); + MetadatumDTO relationIssn = createMetadatumDTO("dc", "relation", "issn", "1934-8258"); + MetadatumDTO relationCitVolume = createMetadatumDTO("oaire", "citation", "volume", "Chapter 6"); + MetadatumDTO doiIdentifier = createMetadatumDTO("dc", "identifier", "doi", "10.1002/0471142905.hg0610s71"); + MetadatumDTO dcType = createMetadatumDTO("dc", "type", null, "text::journal::journal article"); metadatums.add(title); metadatums.add(description); @@ -204,6 +223,11 @@ private ArrayList getRecords2() { metadatums.add(author2); metadatums.add(date); metadatums.add(language); + metadatums.add(relationIsPartOf); + metadatums.add(relationIssn); + metadatums.add(relationCitVolume); + metadatums.add(doiIdentifier); + metadatums.add(dcType); ImportRecord record = new ImportRecord(metadatums); records.add(record); From 08922e684df5b295388e561b90832f901f143703 Mon Sep 17 00:00:00 2001 From: mohamed eskander Date: Wed, 8 Nov 2023 14:14:01 +0200 Subject: [PATCH 522/686] [DSC-1315] fixed broke Its --- .../dspace/layout/service/impl/CrisLayoutTabServiceImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java index 9edb707134f3..2ce7ee7ac4f4 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java @@ -218,7 +218,7 @@ public List findByItem(Context context, String itemUuid) throws S } private String getSubmissionDefinitionName(Item item) { - if (submissionConfigReader == null) { + if (submissionConfigReader == null || item.getOwningCollection() == null) { return ""; } From 340623b2b7f5b5a359feb75bac9a66b1db217019 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 13:53:14 +0100 Subject: [PATCH 523/686] [DSC-1229] Fixes ScriptRestRepositoryIT --- .../java/org/dspace/app/rest/Application.java | 2 +- .../app/rest/ScriptProcessesController.java | 2 +- .../rest/repository/ScriptRestRepository.java | 7 ++---- .../app/rest/ScriptRestRepositoryIT.java | 24 ++++++++++++------- 4 files changed, 20 insertions(+), 15 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java index 488f5b866b68..b7903c558277 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java @@ -185,7 +185,7 @@ public void addCorsMappings(@NonNull CorsRegistry registry) { // Get allowed origins for api and iiif endpoints. 
// The actuator endpoints are configured using management.endpoints.web.cors.* properties String[] corsAllowedOrigins = configuration - .getCorsAllowedOrigins(configuration.getCorsAllowedOriginsConfig()); + .getCorsAllowedOrigins(configuration.getCorsAllowedOriginsConfig()); String[] iiifAllowedOrigins = configuration .getCorsAllowedOrigins(configuration.getIiifAllowedOriginsConfig()); String[] bitstreamAllowedOrigins = configuration diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java index 70149bbb6b0c..a8488b56975a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java @@ -68,7 +68,7 @@ public class ScriptProcessesController { * @throws Exception If something goes wrong */ @RequestMapping(method = RequestMethod.POST, consumes = MediaType.MULTIPART_FORM_DATA_VALUE) - @PreAuthorize("hasAuthority('AUTHENTICATED')") + @PreAuthorize("permitAll()") public ResponseEntity> startProcess( @PathVariable(name = "name") String scriptName, @RequestParam(name = "file", required = false) List files) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java index 0d4e7b1b3256..8570c7096f3c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java @@ -54,8 +54,7 @@ public class ScriptRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) { List scriptConfigurations = scriptService.getScriptConfigurations(context) @@ -104,7 +101,7 @@ public ProcessRest startProcess(Context context, String scriptName, List Date: Wed, 8 Nov 2023 14:07:38 +0100 Subject: [PATCH 524/686] [DSC-1229] Fixes SubmissionFormsControllerIT --- .../app/rest/SubmissionFormsControllerIT.java | 61 ++++++------------- 1 file changed, 20 insertions(+), 41 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java index f6662bb23e20..99fdca6f6136 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest; +import static org.dspace.app.rest.matcher.SubmissionFormFieldMatcher.matchFormWithVisibility; +import static org.dspace.app.rest.matcher.SubmissionFormFieldMatcher.matchFormWithoutVisibility; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -18,6 +20,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.util.Locale; +import java.util.Map; import org.dspace.app.rest.matcher.SubmissionFormFieldMatcher; import org.dspace.app.rest.repository.SubmissionFormRestRepository; @@ -758,46 +761,22 @@ public void visibilityTest() throws Exception { getClient(tokenAdmin).perform(get("/api/config/submissionforms/testVisibility")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - 
.andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpagetwo"))) - .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("sampleauthority"))) - .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=0"), Matchers.containsString("size=2")))) - .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=2"), Matchers.containsString("size=2")))) - .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) - .andExpect(jsonPath("$._links.last.href", Matchers.allOf( - Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=4"), Matchers.containsString("size=2")))) - .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(10))) - .andExpect(jsonPath("$.page.totalPages", equalTo(5))) - .andExpect(jsonPath("$.page.number", is(3))); - - getClient(tokenAdmin).perform(get("/api/config/submissionforms") - .param("size", "2") - .param("page", "4")) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpageone"))) - .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=0"), Matchers.containsString("size=2")))) - .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) - .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=4"), Matchers.containsString("size=2")))) - .andExpect(jsonPath("$._links.last.href", Matchers.allOf( - Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=4"), Matchers.containsString("size=2")))) - .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(10))) - .andExpect(jsonPath("$.page.totalPages", equalTo(5))) - .andExpect(jsonPath("$.page.number", is(4))); + .andExpect(jsonPath("$.id", is("testVisibility"))) + .andExpect(jsonPath("$.name", is("testVisibility"))) + .andExpect(jsonPath("$.type", is("submissionform"))) + .andExpect(jsonPath("$.rows[0].fields", contains( + matchFormWithoutVisibility("Title"), + matchFormWithVisibility("Date of Issue", + Map.of("submission", "read-only", "workflow", "hidden", "edit", "hidden")), + matchFormWithVisibility("Type", Map.of("workflow", "hidden", "edit", "hidden")), + matchFormWithVisibility("Language", + Map.of("submission", "read-only", "workflow", "read-only", "edit", "read-only")), + matchFormWithVisibility("Author(s)", Map.of("workflow", "read-only", "edit", "read-only")), + matchFormWithVisibility("Editor(s)", + Map.of("submission", "read-only", "workflow", "hidden", "edit", "hidden")), + matchFormWithVisibility("Subject(s)", + Map.of("submission", "hidden", "workflow", "read-only", "edit", "read-only")), + matchFormWithVisibility("Description", Map.of("submission", "hidden")) + ))); } } From df2052c61edcf66cb5cec4b1f38245435e98343c Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 14:11:17 +0100 Subject: [PATCH 
525/686] [DSC-1229] Fixes checkstyle --- .../contributor/PubmedDateMetadatumContributor.java | 2 -- .../org/dspace/app/rest/BrowsesResourceControllerIT.java | 7 ++++++- .../app/rest/PubmedImportMetadataSourceServiceIT.java | 3 ++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java index 17816eed9744..f1053fe19ccb 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java @@ -23,8 +23,6 @@ import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; import org.springframework.beans.factory.annotation.Autowired; /** diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index d6070307996b..41ee6be0e17e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -150,7 +150,12 @@ public void findBrowseByVocabulary() throws Exception { //We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) //Check that the JSON root matches the expected browse index - .andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc", "subject", "dc.subject"))) + .andExpect( + jsonPath( + "$", + BrowseIndexMatcher.hierarchicalBrowseIndex("srsc", "subject", "dc.subject") + ) + ) ; } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java index ffbd77db6a61..10bd4f6582cd 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -210,7 +210,8 @@ private ArrayList getRecords2() { MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D"); MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10"); MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); - MetadatumDTO relationIsPartOf = createMetadatumDTO("dc", "relation", "ispartof", "Current protocols in human genetics"); + MetadatumDTO relationIsPartOf = + createMetadatumDTO("dc", "relation", "ispartof", "Current protocols in human genetics"); MetadatumDTO relationIssn = createMetadatumDTO("dc", "relation", "issn", "1934-8258"); MetadatumDTO relationCitVolume = createMetadatumDTO("oaire", "citation", "volume", "Chapter 6"); MetadatumDTO doiIdentifier = createMetadatumDTO("dc", "identifier", "doi", "10.1002/0471142905.hg0610s71"); From 55dfcdf508f0e5bef2ff1ed1510a750fc4fef2a1 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca 
Date: Wed, 8 Nov 2023 16:41:17 +0100 Subject: [PATCH 526/686] [DSC-1229] Fixes ReferCrosswalkIT --- .../publication-coar-types.xml | 239 ++++++++++++++++++ .../publication-coar-types_it.xml | 239 ++++++++++++++++++ .../publication-coar-types_uk.xml | 239 ++++++++++++++++++ .../dspaceFolder/config/submission-forms.xml | 16 +- .../config/submission-forms_it.xml | 16 +- .../config/submission-forms_uk.xml | 2 +- .../crosswalks/ReferCrosswalkIT.java | 14 +- 7 files changed, 740 insertions(+), 25 deletions(-) create mode 100644 dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types.xml create mode 100644 dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_it.xml create mode 100644 dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_uk.xml diff --git a/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types.xml b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types.xml new file mode 100644 index 000000000000..ee726233cc78 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types.xml @@ -0,0 +1,239 @@ + + + + + + A resource consisting primarily of words for reading. Examples include books, letters, dissertations, poems, newspapers, articles, archives of mailing lists. Note that facsimiles or images of texts are still of the genre Text. [Source: http://purl.org/dc/dcmitype/Text] + + + + An annotation in the sense of a legal note is a legally explanatory comment on a decision handed down by a court or arbitral tribunal. [Source: DRIVER info:eu-repo definition] + + + + A list of the books and articles that have been used by someone when writing a particular book or article [Source: https://dictionary.cambridge.org/dictionary/english/bibliography] + + + + A piece of writing or other item of content published on a blog. [Source: https://www.lexico.com/definition/blog_post] + + + + A non-serial publication that is complete in one volume or a designated finite number of volumes. [Source: Adapted from http://purl.org/eprint/type/Book] + + + + A defined chapter or section of a book, usually with a separate title or number. [Source: http://purl.org/spar/fabio/BookChapter] + + + + + + All kind of digital resources contributed to a conference, like conference presentation (slides), conference report, conference lecture, abstracts, demonstrations. For conference papers, posters or proceedings the specific sub-concepts should be used. [COAR definition] + + + + A paper, typically the realization of a research paper reporting original research findings. Use this label when the paper is not published in a proceeding. [Source: Adapted from http://purl.org/spar/fabio/ConferencePaper] + + + + A display poster, typically containing text with illustrative figures and/or tables, usually reporting research results or proposing hypotheses, submitted for acceptance to and/or presented at a conference, seminar, symposium, workshop or similar event. Use this label when the poster is not published in a proceeding. [Source: http://purl.org/spar/fabio/ConferencePoster] + + + + A set of slides containing text, tables or figures, designed to communicate ideas or research results, for projection and viewing by an audience at a conference, symposium, seminar, lecture, workshop or other gatherings. 
[Source: Adapted from http://purl.org/spar/fabio/Presentation] + + + + Conference proceedings is the official record of a conference meeting. It is a collection of documents which corresponds to the presentations given at the conference. It may include additional content. [Source: http://www.ieee.org/documents/confprocdefined.pdf ] + + + + A paper, published within a conference proceeding, typically the realization of a research paper reporting original research findings. [Source: Adapted from http://purl.org/spar/fabio/ConferencePaper] + + + + A display poster, published within a conference proceeding, typically containing text with illustrative figures and/or tables, usually reporting research results or proposing hypotheses, submitted for acceptance to and/or presented at a conference, seminar, symposium, workshop or similar event. [Source: Adapted http://purl.org/spar/fabio/ConferencePoster] + + + + + + + + A journal is a serial publication devoted to disseminating original research and current developments on a subject. (Adapted from ODLIS) [Source: http://dspacecris.eurocris.org/cris/classcerif/classcerif00422] + + + + A brief essay expressing the opinion or position of the chief editor(s) of a (academic) journal with respect to a current political, social, cultural, or professional issue. [Source: Adapted from ODLIS [Source: http://www.abc-clio.com/ODLIS/odlis_e.aspx ] + + + + An article, typically the realization of a research paper reporting original research findings, published in a journal issue. [Source: http://purl.org/spar/fabio/JournalArticle] + + + + A formal correction to an error introduced by the author into a previously published document. (adapted from https://sparontologies.github.io/fabio/current/fabio.html#d4e2712) + + + + A data paper is a scholarly publication describing a particular dataset or group of dataset, published in the form of a peer-reviewed article in a scholarly journal. The main purpose of a data paper is to describe data, the circumstances of their collection, and information related to data features, access and potential reuse. Adapted from https://en.wikipedia.org/wiki/Data_paper and http://www.gbif.org/publishing-data/data-papers + + + + A research article is a primary source, that is, it reports the methods and results of an original study performed by the authors. (adapted from http://apus.libanswers.com/faq/2324) + + + + A review article is a secondary source, that is, it is written about other articles, and does not report original research of its own. [Source: Adapted from http://apus.libanswers.com/faq/2324] + + + + A software paper should include the rationale for the development of the tool and details of the code used for its construction. [Source: Adapted from https://f1000research.com/for-authors/article-guidelines/software-tool-articles ] + + + + + + A letter addressed to the editor and comments on or discussed an item previously published by that periodical, or of interest to its readership. [Source: Adapted from http://purl.org/spar/fabio/Letter] + + + + + + Transcription of an oral presentation/talk intended to present information or teach people about a particular subject, for example by a university or college teacher. [Source: Adopted from https://en.wikipedia.org/wiki/Lecture] + + + + A brief description of important new research, also known as “communication”. 
[Source: https://cerif.eurocris.org/vocab/html/OutputTypes.html#Letter] + + + + A popular interest periodical usually containing articles on a variety of topics, written by various authors in a nonscholarly style or a trade publication, unlike a consumer publication, covers a specific topic for people who work in that particular field or industry. [Source: Adapted from https://www.thebalance.com/what-is-a-trade-publication-exactly-2316039 and http://www.abc-clio.com/ODLIS/odlis_m.aspx] + + + + A manuscript is a work of any kind (text, inscription, music score, map, etc.) written entirely by hand. [Source: https://products.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Symbols used to write music, as in a music score, and to express mathematical concepts. +[Source: Adapted from https://products.abc-clio.com/ODLIS/odlis_n.aspx] + + + + A non-peer reviewed periodical, usually published daily or weekly, consisting primarily of editorials and news items concerning current or recent events and matters of public interest. [Source: http://purl.org/spar/fabio/Newspaper] + + + + Work consisting of a news item appearing in a general-interest newspaper or other general news periodical, containing information of current and timely interest in a field. (Adapted from http://www.reference.md/files/D018/mD018431.html ) + + + + + + A resource type that is not included in existing terms under the top concept "Text". [COAR definition] + + + + A preprint is a scientific manuscript without peer-review and has not yet been accepted by a journal, typically submitted to a public server/repository by the author. [Source: Adapted from https://asapbio.org/preprint-info/preprint-faq#qaef-637] + + + + A report is a separately published record of research findings, research still in progress, policy developments and events, or other technical findings, usually bearing a report number and sometimes a grant number assigned by the funding agency. Also, an official record of the activities of a committee or corporate entity, the proceedings of a government body, or an investigation by an agency, whether published or private, usually archived or submitted to a higher authority, voluntarily or under mandate. In a more general sense, any formal account of facts or information related to a specific event or phenomenon, sometimes given at regular intervals. [Source: http://lu.com/odlis/odlis_R.cfm#report ] + + + + A work that reports on the results of a research study to evaluate interventions or exposures on biomedical or health-related outcomes. The two main types of clinical studies are interventional studies (clinical trials) and observational studies. While most clinical studies concern humans, this publication type may be used for clinical veterinary articles meeting the requisites for humans. [Source: https://www.ncbi.nlm.nih.gov/mesh/2009830] + + + + A formal statement describing how research data will be managed and documented throughout a research project and the terms regarding the subsequent deposit of the data with a data repository for long-term management and preservation. [Source: https://casrai.org/rdm-glossary] + + + + A formal note distributed internally to one or more persons in a company, agency, organization, or institution, with a header indicating the date it was sent and stating to whom it is addressed (To:), from whom it is sent (From:), and the subject of the text (Re:).
Unlike a letter, a memo does not require a full salutation or signature at the end of the text--the sender may simply initial his or her name in the header. [Source: https://products.abc-clio.com/ODLIS/odlis_m.aspx#memorandum] + + + + A policy report presents what is known about a particular issue or problem. It assembles facts and evidence to help readers understand complex issues and form a response. It might aim to be neutral, or it might aim to persuade readers in a particular direction. [Source: https://www.uow.edu.au/student/learning-co-op/assessments/policy-report/#] + + + + A document containing a project report, intended to be delivered to a customer or funding agency describing the results achieved within a specific project. [Source: http://purl.org/spar/fabio/ProjectReportDocument] + + + + The protocol is a detailed plan of the research study including a project summary, project description covering the rationale, objectives, methodology, data management and analysis, ethical considerations, gender issues and references. [Source: Adapted from https://www.who.int/publications/i/item/a-practical-guide-for-health-researchers] + + + + It is a publication that reports on the findings of a research project or alternatively scientific observations on or about a subject. [Source: Adapted from https://en.wikipedia.org/wiki/Research_report] + + + + A document that describes the process, progress, or results of technical or scientific research or the state of a technical or scientific research problem. It might also include recommendations and conclusions of the research. [Source: http://guides.library.cornell.edu/ecommons/types] + + + + + + A research proposal is a document proposing a research project, generally in the sciences or academia, and generally constitutes a request for sponsorship of that research. [Source: https://en.wikipedia.org/wiki/Research_proposal] + + + + A review of others' published work. [Source: Adapted from http://purl.org/spar/fabio/Review] + + + + A written review and critical analysis of the content, scope and quality of a book or other monographic work. [Source: http://purl.org/spar/fabio/BookReview] + + + + A commentary is a more in-depth analysis written to draw attention to a work already published. Commentaries are somewhat like “reviews” in that the author presents his or her analysis of a work and why it would be of interest to a specific audience. [Source: https://www.enago.com/academy/perspective-opinion-and-commentary-pieces] + + + + An evaluation of scientific, academic, or professional work by others working in the same field. [Source: Adopted from https://schema.datacite.org/meta/kernel-4.4/doc/DataCite-MetadataKernel_v4.4.pdf] + + + + + + Technical documentation refers to any type of documentation that describes handling, functionality and architecture of a technical product or a product under development or use. [Source: https://en.wikipedia.org/wiki/Technical_documentation] + + + + A book authored by a student containing a formal presentation of research outputs submitted for examination in completion of a course of study at an institution of higher education, to fulfil the requirements for an academic degree. Also known as a dissertation. [Source: http://purl.org/spar/fabio/Thesis] + + + + A thesis reporting a research project undertaken as part of an undergraduate course of education leading to a bachelor's degree.
[Source: http://purl.org/spar/fabio/BachelorsThesis] + + + + A thesis reporting the research undertaken during a period of graduate study leading to a doctoral degree. [Source: http://purl.org/spar/fabio/DoctoralThesis] + + + + A thesis reporting a research project undertaken as part of a graduate course of education leading to a master's degree. [Source: http://purl.org/spar/fabio/MastersThesis] + + + + + + A written record of words spoken in court proceedings or in a speech, interview, broadcast, or sound recording. [Source: Adapted from https://products.abc-clio.com/ODLIS/odlis_t.aspx] + + + + A working or discussion paper circulated publicly or among a group of peers. Certain disciplines, for example economics, issue working papers in series. [Source: http://www.ukoln.ac.uk/repositories/digirep/index/Eprints_Type_Vocabulary_Encoding_Scheme#:~:text=http%3A//purl.org/eprint/type/WorkingPaper] + + + + + + A resource type that is not included in existing terms. [COAR definition] + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_it.xml b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_it.xml new file mode 100644 index 000000000000..1fcd85735707 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_it.xml @@ -0,0 +1,239 @@ + + + + + + Una risorsa costituita principalmente da parole da leggere. Esempi sono libri, lettere, tesi di laurea, poesie, giornali, articoli, archivi di mailing list. Si noti che i facsimili o le immagini di testi appartengono ancora al genere Testo. [Fonte: http://purl.org/dc/dcmitype/Text] + + + + Un'annotazione nel senso di nota legale è un commento giuridicamente esplicativo su una decisione emessa da un tribunale o da un tribunale arbitrale. [Fonte: DRIVER info:eu-repo definition] + + + + Un elenco di libri e articoli che sono stati utilizzati da qualcuno per scrivere un particolare libro o articolo [Fonte: https://dictionary.cambridge.org/dictionary/english/bibliography] + + + + Un pezzo di scrittura o altro contenuto pubblicato su un blog. [Fonte: https://www.lexico.com/definition/blog_post] + + + + Una pubblicazione non seriale completa in un volume o in un numero finito di volumi. [Fonte: Adattato da http://purl.org/eprint/type/Book] + + + + Un capitolo o una sezione definita di un libro, di solito con un titolo o un numero separato. [Fonte: http://purl.org/spar/fabio/BookChapter] + + + + + + Tutti i tipi di risorse digitali fornite a una conferenza, come presentazioni di conferenze (diapositive), relazioni di conferenze, lezioni di conferenze, abstract, dimostrazioni. Per i documenti, i poster o gli atti di una conferenza si devono usare i sottoconcetti specifici. [definizione COAR] + + + + Un documento, tipicamente la realizzazione di un lavoro di ricerca che riporta i risultati di una ricerca originale. Utilizzare questa etichetta quando l'articolo non è stato pubblicato in un documento. [Fonte: Adattato da http://purl.org/spar/fabio/ConferencePaper] + + + + Un poster espositivo, tipicamente contenente un testo con figure e/o tabelle illustrative, che di solito riporta i risultati di una ricerca o propone ipotesi, presentato per essere accettato e/o presentato a una conferenza, un seminario, un simposio, un workshop o un evento simile. Utilizzare questa etichetta se il poster non è stato pubblicato in un documento. 
[Fonte: http://purl.org/spar/fabio/ConferencePoster] + + + + Una serie di diapositive contenenti testo, tabelle o figure, progettate per comunicare idee o risultati di ricerca, per la proiezione e la visione da parte di un pubblico in occasione di una conferenza, un simposio, un seminario, una lezione, un workshop o altri incontri. [Fonte: Adattato da http://purl.org/spar/fabio/Presentation] + + + + Gli atti della conferenza sono il resoconto ufficiale di una riunione di conferenza. Si tratta di una raccolta di documenti che corrispondono alle presentazioni tenute durante la conferenza. Può includere contenuti aggiuntivi. [Fonte: http://www.ieee.org/documents/confprocdefined.pdf ] + + + + Un documento, pubblicato all'interno degli atti di una conferenza, è in genere la realizzazione di un lavoro di ricerca che riporta i risultati di una ricerca originale. [Fonte: Adattato da http://purl.org/spar/fabio/ConferencePaper] + + + + Un poster espositivo, pubblicato all'interno di un documento di conferenza, contenente solitamente un testo con figure e/o tabelle illustrative, che di solito riporta risultati di ricerca o propone ipotesi, presentato per l'accettazione e/o presentato a una conferenza, un seminario, un simposio, un workshop o un evento simile. [Fonte: adattato http://purl.org/spar/fabio/ConferencePoster] + + + + + + + + Una rivista è una pubblicazione seriale dedicata alla diffusione di ricerche originali e di sviluppi attuali su un argomento. (Adattato da ODLIS) [Fonte: http://dspacecris.eurocris.org/cris/classcerif/classcerif00422] + + + + Un breve saggio che esprime l'opinione o la posizione del direttore (o dei direttori) di una rivista (accademica) rispetto a una questione politica, sociale, culturale o professionale attuale. [Fonte: Adattato da ODLIS [Fonte: http://www.abc-clio.com/ODLIS/odlis_e.aspx ] + + + + Un articolo, tipicamente la realizzazione di un lavoro di ricerca che riporta i risultati di una ricerca originale, pubblicato in un numero di una rivista. [Fonte: http://purl.org/spar/fabio/JournalArticle] + + + + Una correzione formale di un errore introdotto dall'autore in un documento precedentemente pubblicato. (adattato da https://sparontologies.github.io/fabio/current/fabio.html#d4e2712) + + + + Un documento sui dati è una pubblicazione scientifica che descrive un particolare insieme di dati o un gruppo di dati, pubblicata sotto forma di articolo sottoposto a revisione paritaria in una rivista scientifica. Lo scopo principale di un documento sui dati è descrivere i dati, le circostanze della loro raccolta e le informazioni relative alle caratteristiche dei dati, all'accesso e al potenziale riutilizzo. Adattato da https://en.wikipedia.org/wiki/Data_paper e http://www.gbif.org/publishing-data/data-papers + + + + Un articolo di ricerca è una fonte primaria, cioè riporta i metodi e i risultati di uno studio originale condotto dagli autori. (adattato da http://apus.libanswers.com/faq/2324) + + + + Un articolo di revisione è una fonte secondaria, cioè è scritto su altri articoli e non riporta ricerche originali proprie. [Fonte: Adattato da http://apus.libanswers.com/faq/2324] + + + + Un documento sul software dovrebbe includere le motivazioni per lo sviluppo dello strumento e i dettagli del codice utilizzato per la sua costruzione. 
[Fonte: Adattato da https://f1000research.com/for-authors/article-guidelines/software-tool-articles ] + + + + + + Una lettera indirizzata all'editore che commenta o discute un articolo precedentemente pubblicato da quel periodico, o di interesse per i suoi lettori. [Fonte: Adattato da http://purl.org/spar/fabio/Letter] + + + + + + Trascrizione di una presentazione orale o di un discorso destinato a presentare informazioni o a insegnare alle persone un particolare argomento, ad esempio da parte di un insegnante universitario o di un college. [Fonte: Adattato da https://en.wikipedia.org/wiki/Lecture] + + + + Breve descrizione di una nuova ricerca importante, nota anche come "comunicazione". [Fonte: https://cerif.eurocris.org/vocab/html/OutputTypes.html#Letter] + + + + Un periodico di interesse popolare che di solito contiene articoli su una varietà di argomenti, scritti da vari autori in uno stile non accademico o una pubblicazione commerciale che, a differenza di una pubblicazione di consumo, tratta un argomento specifico per le persone che lavorano in quel particolare campo o settore. [Fonte: Adattato da https://www.thebalance.com/what-is-a-trade-publication-exactly-2316039 e http://www.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Un manoscritto è un'opera di qualsiasi tipo (testo, iscrizione, spartito musicale, mappa, ecc.) scritta interamente a mano. [Fonte: https://products.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Simboli usati per scrivere musica, come in uno spartito, e per esprimere concetti matematici. +[Source: Adapted from https://products.abc-clio.com/ODLIS/odlis_n.aspx] + + + + Un periodico non recensito, di solito pubblicato quotidianamente o settimanalmente, che consiste principalmente in editoriali e notizie riguardanti eventi attuali o recenti e questioni di interesse pubblico. [Fonte: http://purl.org/spar/fabio/Newspaper] + + + + Lavoro che consiste in una notizia apparsa su un giornale di interesse generale o su un altro periodico di informazione generale, contenente informazioni di interesse attuale e tempestivo in un campo. (Adattato da http://www.reference.md/files/D018/mD018431.html ) + + + + + + Un tipo di risorsa che non è incluso nei termini esistenti sotto il concetto superiore "Testo". [definizione COAR] + + + + Un preprint è un manoscritto scientifico non sottoposto a peer-review e non ancora accettato da una rivista, tipicamente inviato dall'autore a un server/repository pubblico. [Fonte: Adattato da https://asapbio.org/preprint-info/preprint-faq#qaef-637] + + + + Un rapporto è un resoconto pubblicato separatamente dei risultati di una ricerca, di una ricerca ancora in corso, di sviluppi ed eventi politici o di altri risultati tecnici, di solito con un numero di rapporto e talvolta di sovvenzione assegnato dall'agenzia di finanziamento. È anche un documento ufficiale delle attività di un comitato o di un'entità aziendale, dei procedimenti di un ente governativo o di un'indagine di un'agenzia, pubblicato o privato, solitamente archiviato o presentato a un'autorità superiore, volontariamente o su mandato. In senso più generale, qualsiasi resoconto formale di fatti o informazioni relativi a un evento o fenomeno specifico, talvolta fornito a intervalli regolari. [Fonte: http://lu.com/odlis/odlis_R.cfm#report ] + + + + Un lavoro che riporta i risultati di uno studio di ricerca per valutare interventi o esposizioni su esiti biomedici o sanitari. I due tipi principali di studi clinici sono gli studi interventistici (trial clinici) e gli studi osservazionali. 
Sebbene la maggior parte degli studi clinici riguardi gli esseri umani, questo tipo di pubblicazione può essere utilizzato per articoli clinici veterinari che soddisfano i requisiti per gli esseri umani. [Fonte: https://www.ncbi.nlm.nih.gov/mesh/2009830] + + + + Una dichiarazione formale che descrive come i dati di ricerca saranno gestiti e documentati nel corso di un progetto di ricerca e i termini relativi al successivo deposito dei dati presso un archivio di dati per la gestione e la conservazione a lungo termine. [Fonte: https://casrai.org/rdm-glossary] + + + + Una nota formale distribuita internamente a una o più persone in un'azienda, agenzia, organizzazione o istituzione, con un'intestazione che indica la data di invio e che specifica a chi è indirizzata (To:), da chi è inviata (From:) e l'oggetto del testo (Re:). A differenza di una lettera, un memo non richiede un saluto completo o una firma alla fine del testo: il mittente può semplicemente siglare il proprio nome nell'intestazione. [Fonte: https://products.abc-clio.com/ODLIS/odlis_m.aspx#memorandum] + + + + Un rapporto politico presenta ciò che si sa su una particolare questione o problema. Riunisce fatti e prove per aiutare i lettori a comprendere questioni complesse e a formulare una risposta. Può mirare a essere neutrale o a persuadere i lettori in una particolare direzione. [Fonte: https://www.uow.edu.au/student/learning-co-op/assessments/policy-report/#] + + + + Un documento contenente una relazione di progetto, destinato a essere consegnato a un cliente o a un'agenzia di finanziamento, che descrive i risultati ottenuti nell'ambito di un progetto specifico. [Fonte: http://purl.org/spar/fabio/ProjectReportDocument] + + + + Il protocollo è un piano dettagliato dello studio di ricerca che include un sommario del progetto, una descrizione del progetto che copre il razionale, gli obiettivi, la metodologia, la gestione e l'analisi dei dati, le considerazioni etiche, le questioni di genere e i riferimenti. [Fonte: Adattato da https://www.who.int/publications/i/item/a-practical-guide-for-health-researchers] + + + + È una pubblicazione che riporta i risultati di un progetto di ricerca o, in alternativa, osservazioni scientifiche su un argomento. [Fonte: Adattato da https://en.wikipedia.org/wiki/Research_report] + + + + Un documento che descrive il processo, i progressi o i risultati di una ricerca tecnica o scientifica o lo stato di un problema di ricerca tecnica o scientifica. Può anche includere raccomandazioni e conclusioni della ricerca. [Fonte: http://guides.library.cornell.edu/ecommons/types] + + + + + + Una proposta di ricerca è un documento che propone un progetto di ricerca, generalmente in ambito scientifico o accademico, e costituisce generalmente una richiesta di sponsorizzazione di tale ricerca. [Fonte: https://en.wikipedia.org/wiki/Research_proposal] + + + + Una rassegna di lavori pubblicati da altri. [Fonte: adattato da http://purl.org/spar/fabio/Review] + + + + Una recensione scritta e un'analisi critica del contenuto, della portata e della qualità di un libro o di un'altra opera monografica. [Fonte: http://purl.org/spar/fabio/BookReview] + + + + Un commento è un'analisi più approfondita scritta per attirare l'attenzione su un'opera già pubblicata. I commenti sono in qualche modo simili alle "recensioni", in quanto l'autore presenta la propria analisi di un'opera e il motivo per cui sarebbe interessante per un pubblico specifico. 
[Fonte: https://www.enago.com/academy/perspective-opinion-and-commentary-pieces] + + + + Una valutazione di un lavoro scientifico, accademico o professionale da parte di altri che lavorano nello stesso campo. [Fonte: ] Adottato da https://schema.datacite.org/meta/kernel-4.4/doc/DataCite-MetadataKernel_v4.4.pdf] + + + + + + La documentazione tecnica si riferisce a qualsiasi tipo di documentazione che descrive la gestione, la funzionalità e l'architettura di un prodotto tecnico o di un prodotto in fase di sviluppo o di utilizzo. [Fonte: https://en.wikipedia.org/wiki/Technical_documentation] + + + + Un libro scritto da uno studente che contiene una presentazione formale dei risultati della ricerca presentata per l'esame al termine di un corso di studi presso un istituto di istruzione superiore, per soddisfare i requisiti di un titolo accademico. Conosciuto anche come tesi di laurea. [Fonte: http://purl.org/spar/fabio/Thesis] + + + + Una tesi che riporta un progetto di ricerca intrapreso nell'ambito di un corso di studi universitario che porta al conseguimento di un diploma di laurea. [Fonte: http://purl.org/spar/fabio/BachelorsThesis] + + + + Una tesi che riporta la ricerca intrapresa durante un periodo di studi universitari che porta al conseguimento di un dottorato. [Fonte: http://purl.org/spar/fabio/DoctoralThesis] + + + + Una tesi che riporta un progetto di ricerca intrapreso nell'ambito di un corso di laurea che porta al conseguimento di un master. [Fonte: http://purl.org/spar/fabio/MastersThesis] + + + + + + Una registrazione scritta di parole pronunciate in un procedimento giudiziario o in un discorso, un'intervista, una trasmissione o una registrazione sonora. [Fonte: Adattato da https://products.abc-clio.com/ODLIS/odlis_t.aspx] + + + + Un documento di lavoro o di discussione diffuso pubblicamente o tra un gruppo di colleghi. Alcune discipline, ad esempio l'economia, pubblicano documenti di lavoro in serie. [Fonte: http://www.ukoln.ac.uk/repositories/digirep/index/Eprints_Type_Vocabulary_Encoding_Scheme#:~:text=http%3A//purl.org/eprint/type/WorkingPaper]. + + + + + + Un tipo di risorsa che non è incluso nei termini esistenti. [Definizione COAR] + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_uk.xml b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_uk.xml new file mode 100644 index 000000000000..9e0bb808e5f7 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/publication-coar-types_uk.xml @@ -0,0 +1,239 @@ + + + + + + Ресурс, що складається переважно зі слів для читання. Прикладами можуть бути книги, листи, дисертації, вірші, газети, статті, архіви списків розсилки. Зауважте, що факсиміле або зображення текстів все ще належать до жанру Текст. [Джерело: http://purl.org/dc/dcmitype/Text] + + + + Анотація в значенні юридичної замітки - це юридично роз'яснювальний коментар до рішення, винесеного судом або арбітражним судом. [Джерело: DRIVER info:eu-repo визначення]. + + + + Список книг і статей, які були використані кимось при написанні певної книги або статті [Джерело: https://dictionary.cambridge.org/dictionary/english/bibliography]. + + + + Твір або інший елемент контенту, опублікований у блозі. [Джерело: https://www.lexico.com/definition/blog_post] + + + + Несерійне видання, завершене в одному томі або визначеній кінцевій кількості томів. 
[Джерело: адаптовано з http://purl.org/eprint/type/Book] + + + + Певна глава або розділ книги, зазвичай з окремим заголовком або номером. [Джерело: http://purl.org/spar/fabio/BookChapter] + + + + + + Всі види цифрових ресурсів, що були представлені на конференції, такі як презентація конференції (слайди), доповідь конференції, лекція конференції, тези, демонстрації. Для конференційних доповідей, постерів або матеріалів слід використовувати спеціальні підпоняття. [Визначення COAR]. + + + + Документ, як правило, реалізація дослідницької роботи, в якій викладено оригінальні результати дослідження. Використовуйте це позначення, коли стаття не публікується у збірнику наукових праць. [Джерело: адаптовано з http://purl.org/spar/fabio/ConferencePaper] + + + + Плакат для демонстрації, що зазвичай містить текст з ілюстративними рисунками та/або таблицями, зазвичай повідомляє про результати дослідження або пропонує гіпотези, поданий для прийняття та/або представлений на конференції, семінарі, симпозіумі, воркшопі або подібному заході. Використовуйте це позначення, коли постер не публікується у збірнику матеріалів. [Джерело: http://purl.org/spar/fabio/ConferencePoster] + + + + Набір слайдів, що містять текст, таблиці або малюнки, призначені для передачі ідей або результатів досліджень, для проекції та перегляду аудиторією на конференції, симпозіумі, семінарі, лекції, воркшопі або інших заходах. [Джерело: адаптовано з http://purl.org/spar/fabio/Presentation] + + + + Збірник матеріалів конференції - це офіційний звіт про роботу конференції. Це збірник документів, який відповідає презентаціям, представленим на конференції. Він може містити додаткову інформацію. [Джерело: http://www.ieee.org/documents/confprocdefined.pdf ]. + + + + Стаття, опублікована в матеріалах конференції, як правило, є реалізацією наукової роботи, в якій викладено оригінальні результати дослідження. [Джерело: адаптовано з http://purl.org/spar/fabio/ConferencePaper] + + + + Плакат, опублікований у збірнику матеріалів конференції, який зазвичай містить текст з ілюстративними рисунками та/або таблицями, зазвичай повідомляє про результати досліджень або пропонує гіпотези, подається для прийняття та/або представляється на конференції, семінарі, симпозіумі, воркшопі або подібному заході. [Джерело: Адаптовано за http://purl.org/spar/fabio/ConferencePoster] + + + + + + + + Журнал - це серійне видання, призначене для поширення оригінальних досліджень і поточних розробок з певної тематики. (Адаптовано з ODLIS) [Джерело: http://dspacecris.eurocris.org/cris/classcerif/classcerif00422] + + + + Коротке есе, що виражає думку або позицію головного редактора (академічного) журналу щодо актуального політичного, соціального, культурного або професійного питання. [Джерело: Адаптовано з ODLIS [Джерело: http://www.abc-clio.com/ODLIS/odlis_e.aspx ]. + + + + Стаття, як правило, реалізація дослідницької роботи, що повідомляє про оригінальні результати дослідження, опублікована у випуску журналу. [Джерело: http://purl.org/spar/fabio/JournalArticle] + + + + Формальне виправлення помилки, внесеної автором у раніше опублікований документ. (адаптовано з https://sparontologies.github.io/fabio/current/fabio.html#d4e2712) + + + + Data paper - це наукова публікація, що описує певний набір або групу наборів даних, опублікована у вигляді рецензованої статті в науковому журналі. Основна мета документу даних - описати дані, обставини їхнього збору, а також інформацію, пов'язану з характеристиками даних, доступом до них і потенційним повторним використанням. 
Взято з https://en.wikipedia.org/wiki/Data_paper та http://www.gbif.org/publishing-data/data-papers + + + + Дослідницька стаття є першоджерелом, тобто повідомляє про методи і результати оригінального дослідження, виконаного авторами. (адаптовано з http://apus.libanswers.com/faq/2324) + + + + Оглядова стаття є вторинним джерелом, тобто вона написана про інші статті і не повідомляє про власні оригінальні дослідження. [Джерело: Адаптовано з http://apus.libanswers.com/faq/2324] + + + + Стаття про програмне забезпечення повинна містити обґрунтування розробки інструменту та деталі коду, використаного для його побудови. [Джерело: Адаптовано з https://f1000research.com/for-authors/article-guidelines/software-tool-articles ] + + + + + + Лист на ім'я редактора, в якому коментується або обговорюється тема, раніше опублікована цим виданням, або така, що становить інтерес для його читацької аудиторії. [Джерело: адаптовано з http://purl.org/spar/fabio/Letter] + + + + + + Транскрипція усного виступу/розмови, що має на меті представити інформацію або навчити людей певному предмету, наприклад, викладача університету або коледжу. [Джерело: Запозичено з https://en.wikipedia.org/wiki/Lecture] + + + + Короткий опис нового важливого дослідження, також відомого як "комунікація". [Джерело: https://cerif.eurocris.org/vocab/html/OutputTypes.html#Letter] + + + + Періодичне видання, яке зазвичай містить статті на різноманітні теми, написані різними авторами в ненауковому стилі, або професійне видання, на відміну від споживчого видання, висвітлює конкретну тему для людей, які працюють у цій галузі чи індустрії. [Джерело: Адаптовано з https://www.thebalance.com/what-is-a-trade-publication-exactly-2316039 та http://www.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Рукопис - це твір будь-якого виду (текст, напис, музична партитура, карта тощо), написаний повністю від руки. [Джерело: https://products.abc-clio.com/ODLIS/odlis_m.aspx] + + + + Символи, що використовуються для написання музики, як у нотному записі, а також для вираження математичних понять. +[Source: Adapted from https://products.abc-clio.com/ODLIS/odlis_n.aspx] + + + + Періодичне видання, що не рецензується, зазвичай виходить щодня або щотижня, складається переважно з редакційних статей та новин, що стосуються поточних або нещодавніх подій і питань, які становлять суспільний інтерес. [Джерело: http://purl.org/spar/fabio/Newspaper] + + + + Робота, що складається з новини, яка з'являється в газеті або іншому періодичному виданні, що містить актуальну та своєчасну інформацію в тій чи іншій галузі. (Запозичено з http://www.reference.md/files/D018/mD018431.html) + + + + + + Тип ресурсу, який не включено до існуючих термінів під головним поняттям "Текст". [визначення COAR]. + + + + Препринт - це науковий рукопис, який ще не пройшов рецензування і не був прийнятий журналом, як правило, розміщений автором на публічному сервері/репозиторії. [Джерело: Адаптовано з https://asapbio.org/preprint-info/preprint-faq#qaef-637] + + + + Звіт - це окремо опублікований запис результатів досліджень, досліджень, які ще тривають, політичних змін і подій або інших технічних результатів, який зазвичай має номер звіту, а іноді й номер гранту, присвоєний установою, що фінансує проект. Крім того, офіційний звіт про діяльність комітету або юридичної особи, засідання державного органу або розслідування, проведене агентством, як опублікований, так і приватний, зазвичай архівується або передається до вищого органу влади, добровільно або за мандатом. 
У більш загальному сенсі, будь-який офіційний звіт про факти або інформацію, пов'язану з конкретною подією або явищем, який іноді подається через регулярні проміжки часу. [Джерело: http://lu.com/odlis/odlis_R.cfm#report ]. + + + + Робота, яка звітує про результати наукового дослідження з метою оцінки втручань або впливів на біомедичні або пов'язані зі здоров'ям результати. Двома основними типами клінічних досліджень є інтервенційні дослідження (клінічні випробування) та обсерваційні дослідження. Хоча більшість клінічних досліджень стосуються людей, цей тип публікації може бути використаний для клінічних ветеринарних статей, що відповідають вимогам для людей. [Джерело: https://www.ncbi.nlm.nih.gov/mesh/2009830] + + + + Офіційна заява, що описує, як будуть управлятися і документуватися дослідницькі дані протягом дослідницького проекту, а також умови подальшої передачі даних в сховище даних для довгострокового управління і збереження. [Джерело: https://casrai.org/rdm-glossary] + + + + Офіційна записка, що розповсюджується всередині компанії, агентства, організації чи установи одній або кільком особам, із заголовком, що вказує на дату її надсилання та зазначає, кому вона адресована (To:), від кого вона надсилається (From:) та тему тексту (Re:). На відміну від листа, службова записка не вимагає повного привітання або підпису в кінці тексту - відправник може просто вказати своє ім'я в заголовку. [Джерело: https://products.abc-clio.com/ODLIS/odlis_m.aspx#memorandum] + + + + В аналітичному звіті представлено те, що відомо про певне питання чи проблему. У ньому зібрані факти і докази, які допомагають читачам зрозуміти складні питання і сформувати відповідь. Він може бути нейтральним, а може мати на меті переконати читачів у певному напрямку. [Джерело: https://www.uow.edu.au/student/learning-co-op/assessments/policy-report/#] + + + + Документ, що містить звіт про проект, призначений для надання замовнику або фінансовій установі, який описує результати, досягнуті в рамках конкретного проекту. [Джерело: http://purl.org/spar/fabio/ProjectReportDocument] + + + + Протокол - це детальний план наукового дослідження, що включає резюме проекту, опис проекту, що охоплює обґрунтування, цілі, методологію, управління даними та їх аналіз, етичні міркування, гендерні питання та посилання. [Джерело: Адаптовано з https://www.who.int/publications/i/item/a-practical-guide-for-health-researchers] + + + + Це публікація, яка повідомляє про результати дослідницького проекту або, як альтернатива, наукові спостереження на певну тему або про неї. [Джерело: адаптовано з https://en.wikipedia.org/wiki/Research_report] + + + + Документ, який описує процес, хід або результати технічного чи наукового дослідження або стан проблеми технічного чи наукового дослідження. Він також може містити рекомендації та висновки дослідження. [Джерело: http://guides.library.cornell.edu/ecommons/types] + + + + + + Дослідницька пропозиція - це документ, що пропонує дослідницький проект, як правило, в галузі науки або освіти, і, як правило, являє собою прохання про спонсорську підтримку цього дослідження. [Джерело: https://en.wikipedia.org/wiki/Research_proposal] + + + + Рецензія на чужі опубліковані роботи. [Джерело: адаптовано з http://purl.org/spar/fabio/Review] + + + + Письмовий огляд і критичний аналіз змісту, обсягу та якості книги або іншої монографічної праці. [Джерело: http://purl.org/spar/fabio/BookReview] + + + + Коментар - це більш поглиблений аналіз, написаний з метою привернути увагу до вже опублікованої роботи. 
Коментарі дещо схожі на "рецензії" в тому сенсі, що автор представляє свій аналіз роботи і пояснює, чому вона може бути цікавою для певної аудиторії. [Джерело: https://www.enago.com/academy/perspective-opinion-and-commentary-pieces] + + + + Оцінка наукової, академічної або професійної роботи іншими особами, які працюють у тій самій галузі. [Джерело: Взято з https://schema.datacite.org/meta/kernel-4.4/doc/DataCite-MetadataKernel_v4.4.pdf] + + + + + + Технічна документація - будь-який тип документації, що описує поводження, функціональність та архітектуру технічного продукту або продукту, що перебуває на стадії розробки чи використання. [Джерело: https://en.wikipedia.org/wiki/Technical_documentation] + + + + Книга, написана студентом, що містить офіційну презентацію результатів дослідження, яка подається на іспит після завершення курсу навчання у вищому навчальному закладі, щоб виконати вимоги для здобуття наукового ступеня. Також відома як дисертація. [Джерело: http://purl.org/spar/fabio/Thesis] + + + + Дипломна робота, що містить звіт про дослідницький проект, виконаний в рамках бакалаврського курсу навчання, що веде до здобуття ступеня бакалавра. [Джерело: http://purl.org/spar/fabio/BachelorsThesis] + + + + Дисертація, що містить звіт про дослідження, проведене під час навчання в аспірантурі для здобуття ступеня доктора наук. [Джерело: http://purl.org/spar/fabio/DoctoralThesis] + + + + Дипломна робота, в якій висвітлюється дослідницький проект, виконаний в рамках аспірантури, що веде до отримання ступеня магістра. [Джерело: http://purl.org/spar/fabio/MastersThesis] + + + + + + Письмовий запис слів, виголошених у судовому засіданні або у промові, інтерв'ю, радіо- чи телепередачі чи звукозаписі. [Джерело: адаптовано з https://products.abc-clio.com/ODLIS/odlis_t.aspx] + + + + Робочий або дискусійний документ, який поширюється публічно або серед групи колег. Деякі дисципліни, наприклад, економіка, випускають робочі документи серіями. [Джерело: http://www.ukoln.ac.uk/repositories/digirep/index/Eprints_Type_Vocabulary_Encoding_Scheme#:~:text=http%3A//purl.org/eprint/type/WorkingPaper] + + + + + + Тип ресурсу, який не включено до існуючих термінів. [Визначення COAR]. + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 8f95de44ed85..942b8e2a8aef 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -1765,15 +1765,13 @@ You can leave out the day and/or month if they aren't applicable. dc - subject - - - true - - twobox - Enter appropriate subject keywords or phrases. - - srsc + type + + onebox + false + You must select a publication type + Select the type(s) of content of the item. + publication-coar-types diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml index 20a1a7157b4f..4ad3759e19aa 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml @@ -94,14 +94,14 @@ - dc - type - - onebox - false - Devi selezionare un tipo di pubblicazione - Seleziona il tipo di contenuto della pubblicazione. - srsc_it + dc + type + + onebox + false + You must select a publication type + Select the type(s) of content of the item. 
+ publication-coar-types_it diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml index b6eebe4ef3b7..244c26204649 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml @@ -99,7 +99,7 @@ false Ви повинні вибрати тип публікації Виберіть тип вмісту публікації. - srsc_uk + publication-coar-types_uk
    diff --git a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java index cfe0aa60663e..770e3c83a2c7 100644 --- a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java +++ b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java @@ -2679,7 +2679,7 @@ public void testPublicationVirtualFieldWithVocabularyValuePairList() throws Exce String[] resultLines = out.toString().split("\n"); assertThat(resultLines.length, is(7)); assertThat(resultLines[0].trim(), equalTo("")); - assertThat(resultLines[4].trim(), equalTo("software paper")); + assertThat(resultLines[4].trim(), equalTo("articolo sul software")); assertThat(resultLines[5].trim(), equalTo("Inglese (USA)")); assertThat(resultLines[6].trim(), equalTo("")); @@ -2690,7 +2690,7 @@ public void testPublicationVirtualFieldWithVocabularyValuePairList() throws Exce resultLines = out.toString().split("\n"); assertThat(resultLines.length, is(7)); assertThat(resultLines[0].trim(), equalTo("")); - assertThat(resultLines[4].trim(), equalTo("software paper")); + assertThat(resultLines[4].trim(), equalTo("програмна стаття")); assertThat(resultLines[5].trim(), equalTo("Американська (USA)")); assertThat(resultLines[6].trim(), equalTo("")); @@ -2760,7 +2760,7 @@ public void testPublicationMultilanguageVirtualFieldValuePairList() throws Excep // set italian locale context.setCurrentLocale(Locale.ITALIAN); - String subjectVocabularyName = "srsc"; + String subjectVocabularyName = "publication-coar-types"; Collection publicationCollection = createCollection(context, community) .withEntityType("Publication") @@ -2770,7 +2770,7 @@ public void testPublicationMultilanguageVirtualFieldValuePairList() throws Excep Item publicationItem = createItem(context, publicationCollection) .withTitle("Publication title") - .withType("not translated", subjectVocabularyName + ":SCB16") + .withType("not translated", subjectVocabularyName + ":c_7bab") .withLanguage("en_US") .build(); @@ -2795,7 +2795,7 @@ public void testPublicationMultilanguageVirtualFieldValuePairList() throws Excep String[] resultLines = out.toString().split("\n"); assertThat(resultLines.length, is(7)); assertThat(resultLines[0].trim(), equalTo("")); - assertThat(resultLines[3].trim(), equalTo("TECNOLOGIA")); + assertThat(resultLines[3].trim(), equalTo("articolo sul software")); assertThat(resultLines[4].trim(), equalTo("Inglese (USA)")); assertThat(resultLines[5].trim(), equalTo("Italia")); assertThat(resultLines[6].trim(), equalTo("")); @@ -2811,7 +2811,7 @@ public void testPublicationMultilanguageVirtualFieldValuePairList() throws Excep resultLines = out.toString().split("\n"); assertThat(resultLines.length, is(7)); assertThat(resultLines[0].trim(), equalTo("")); - assertThat(resultLines[3].trim(), equalTo("ТЕХНОЛОГІЯ")); + assertThat(resultLines[3].trim(), equalTo("програмна стаття")); assertThat(resultLines[4].trim(), equalTo("Американська (USA)")); // take value from submission_forms (_uk doesn't have the value-pair) assertThat(resultLines[5].trim(), equalTo("Italia")); @@ -2830,7 +2830,7 @@ public void testPublicationMultilanguageVirtualFieldValuePairList() throws Excep assertThat(resultLines.length, is(7)); // takes the value from default (_ru doesn't exist) assertThat(resultLines[0].trim(), equalTo("")); - assertThat(resultLines[3].trim(), equalTo("TECHNOLOGY")); + assertThat(resultLines[3].trim(), 
equalTo("software paper")); assertThat( resultLines[4].trim(), equalTo("English (United States)") ); From ab23382c549777cf2cfdced1ea9e0ee015ad1c65 Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Wed, 8 Nov 2023 17:28:41 +0100 Subject: [PATCH 527/686] [DSC-1351] fix item iterator broke after context.commit --- .../dao/impl/RequestItemDAOImpl.java | 2 +- .../content/dao/impl/BitstreamDAOImpl.java | 10 +- .../dspace/content/dao/impl/ItemDAOImpl.java | 34 +++--- .../dao/impl/MetadataValueDAOImpl.java | 4 +- .../org/dspace/core/AbstractHibernateDAO.java | 25 ++-- .../java/org/dspace/core/UUIDIterator.java | 107 ++++++++++++++++++ .../main/java/org/dspace/curate/Curation.java | 4 +- .../main/java/org/dspace/curate/Curator.java | 59 ++++++---- .../general/CreateMissingIdentifiersIT.java | 4 +- 9 files changed, 185 insertions(+), 64 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/core/UUIDIterator.java diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java index 008174ded88c..a09a2bf250e5 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java @@ -46,6 +46,6 @@ public RequestItem findByToken(Context context, String token) throws SQLExceptio public Iterator findByItem(Context context, Item item) throws SQLException { Query query = createQuery(context, "FROM RequestItem WHERE item_id= :uuid"); query.setParameter("uuid", item.getID()); - return iterate(query); + return iterate(context, query, RequestItem.class); } } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java index 783bac12b5dc..7a75f4bef3f3 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java @@ -86,7 +86,7 @@ public Iterator findByCommunity(Context context, Community community) query.setParameter("community", community); - return iterate(query); + return iterate(context, query, Bitstream.class); } @Override @@ -99,7 +99,7 @@ public Iterator findByCollection(Context context, Collection collecti query.setParameter("collection", collection); - return iterate(query); + return iterate(context, query, Bitstream.class); } @Override @@ -111,7 +111,7 @@ public Iterator findByItem(Context context, Item item) throws SQLExce query.setParameter("item", item); - return iterate(query); + return iterate(context, query, Bitstream.class); } @Override @@ -151,14 +151,14 @@ public Iterator findShowableByItem(Context context, UUID itemId, Stri query.setParameter("itemId", itemId); query.setParameter("bundleName", bundleName); - return iterate(query); + return iterate(context, query, Bitstream.class); } @Override public Iterator findByStoreNumber(Context context, Integer storeNumber) throws SQLException { Query query = createQuery(context, "select b from Bitstream b where b.storeNumber = :storeNumber"); query.setParameter("storeNumber", storeNumber); - return iterate(query); + return iterate(context, query, Bitstream.class); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java index 378084ee8c43..443268cbbb7a 100644 --- 
a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java @@ -58,7 +58,7 @@ protected ItemDAOImpl() { public Iterator findAll(Context context, boolean archived) throws SQLException { Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id"); query.setParameter("in_archive", archived); - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -67,7 +67,7 @@ public Iterator findAll(Context context, boolean archived, int limit, int query.setParameter("in_archive", archived); query.setFirstResult(offset); query.setMaxResults(limit); - return iterate(query); + return iterate(context, query, Item.class); } @@ -77,7 +77,7 @@ public Iterator findAll(Context context, boolean archived, boolean withdra "FROM Item WHERE inArchive=:in_archive or withdrawn=:withdrawn ORDER BY id"); query.setParameter("in_archive", archived); query.setParameter("withdrawn", withdrawn); - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -91,7 +91,7 @@ public Iterator findAllRegularItems(Context context) throws SQLException { "WHERE i.inArchive=true or i.withdrawn=true or (i.inArchive=false and v.id IS NOT NULL) " + "ORDER BY i.id" ); - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -115,7 +115,7 @@ public Iterator findAll(Context context, boolean archived, if (lastModified != null) { query.setParameter("last_modified", lastModified, TemporalType.TIMESTAMP); } - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -124,7 +124,7 @@ public Iterator findBySubmitter(Context context, EPerson eperson) throws S "FROM Item WHERE inArchive=:in_archive and submitter=:submitter ORDER BY id"); query.setParameter("in_archive", true); query.setParameter("submitter", eperson); - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -135,7 +135,7 @@ public Iterator findBySubmitter(Context context, EPerson eperson, boolean } Query query = createQuery(context, "FROM Item WHERE submitter=:submitter ORDER BY id"); query.setParameter("submitter", eperson); - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -154,7 +154,7 @@ public Iterator findBySubmitter(Context context, EPerson eperson, Metadata hibernateQuery.setParameter("in_archive", true); hibernateQuery.setParameter("submitter", eperson); hibernateQuery.setMaxResults(limit); - return iterate(hibernateQuery); + return iterate(context, hibernateQuery, Item.class); } @Override @@ -172,7 +172,7 @@ public Iterator findByMetadataField(Context context, MetadataField metadat if (value != null) { query.setParameter("text_value", value); } - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -189,7 +189,7 @@ public Iterator findByMetadataField(Context context, MetadataField metadat if (value != null) { query.setParameter("text_value", value); } - return iterate(query); + return iterate(context, query, Item.class); } enum OP { @@ -316,7 +316,7 @@ public Iterator findByAuthorityValue(Context context, MetadataField metada query.setParameter("in_archive", inArchive); query.setParameter("metadata_field", metadataField); query.setParameter("authority", authority); - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -333,7 +333,7 @@ public Iterator findArchivedByCollection(Context context, Collection colle if (limit != null) { 
query.setMaxResults(limit); } - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -371,7 +371,7 @@ public Iterator findAllByCollection(Context context, Collection collection "select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); query.setParameter("collection", collection); - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -388,7 +388,7 @@ public Iterator findAllByCollection(Context context, Collection collection query.setMaxResults(limit); } - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -426,7 +426,7 @@ public Iterator findByLastModifiedSince(Context context, Date since) Query query = createQuery(context, "SELECT i FROM Item i WHERE last_modified > :last_modified ORDER BY id"); query.setParameter("last_modified", since, TemporalType.TIMESTAMP); - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -469,7 +469,7 @@ public Iterator findByLikeAuthorityValue(Context context, query.setParameter("in_archive", inArchive); } query.setParameter("authority", likeAuthority); - return iterate(query); + return iterate(context, query, Item.class); } @Override @@ -477,7 +477,7 @@ public Iterator findByIds(Context context, List ids) throws SQLExcep Query query = createQuery(context, "SELECT item " + "FROM Item as item WHERE item.id IN (:ids)"); query.setParameter("ids", ids); - return iterate(query); + return iterate(context, query, Item.class); } } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataValueDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataValueDAOImpl.java index f37ced9ab7d4..2450db5bd19b 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataValueDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataValueDAOImpl.java @@ -58,7 +58,7 @@ public Iterator findItemValuesByFieldAndValue(Context context, Query query = createQuery(context, queryString); query.setParameter("metadata_field_id", metadataField.getID()); query.setParameter("text_value", value); - return iterate(query); + return iterate(context, query, MetadataValue.class); } @Override @@ -69,7 +69,7 @@ public Iterator findByValueLike(Context context, String value) th Query query = createQuery(context, queryString); query.setParameter("searchString", value); - return iterate(query); + return iterate(context, query, MetadataValue.class); } @Override diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java index 32ad747d765e..8db06988f3ae 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java @@ -7,6 +7,8 @@ */ package org.dspace.core; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; import java.sql.SQLException; import java.util.Iterator; import java.util.List; @@ -19,7 +21,6 @@ import javax.persistence.criteria.Expression; import javax.persistence.criteria.Root; -import com.google.common.collect.AbstractIterator; import org.apache.commons.collections.CollectionUtils; import org.hibernate.Session; @@ -33,8 +34,14 @@ */ public abstract class AbstractHibernateDAO implements GenericDAO { - protected AbstractHibernateDAO() { + private Class entityTypeClass; + protected AbstractHibernateDAO() { + Type type = getClass().getGenericSuperclass(); + if (type instanceof 
ParameterizedType) { + ParameterizedType paramType = (ParameterizedType) type; + entityTypeClass = (Class) paramType.getActualTypeArguments()[0]; + } } @Override @@ -297,22 +304,14 @@ public T uniqueResult(Query query) { * @param query * The query for which an Iterator will be made * @return The Iterator for the results of this query + * @throws SQLException */ - public Iterator iterate(Query query) { + public Iterator iterate(Context ctx, Query query, Class entityType) throws SQLException { @SuppressWarnings("unchecked") org.hibernate.query.Query hquery = query.unwrap(org.hibernate.query.Query.class); Stream stream = hquery.stream(); Iterator iter = stream.iterator(); - return new AbstractIterator () { - @Override - protected T computeNext() { - return iter.hasNext() ? iter.next() : endOfData(); - } - @Override - public void finalize() { - stream.close(); - } - }; + return new UUIDIterator(ctx, iter, entityType); } /** diff --git a/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java b/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java new file mode 100644 index 000000000000..f14ece677426 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java @@ -0,0 +1,107 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +import java.sql.SQLException; +import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.UUID; + +import com.google.common.collect.AbstractIterator; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.springframework.beans.factory.annotation.Autowired; + + + +/** + * Iterator implementation which allows to iterate over items and commit while + * iterating. 
Using a list of UUID the iterator doesn't get invalidated after a + * commit + * + * @author stefano.maffei at 4science.com + * @param class type + */ +public class UUIDIterator extends AbstractIterator { + + private Class entityTypeClass; + + private List cachedUUIDs = new LinkedList<>(); + + private Iterator uuidIterator; + + private Iterator iterator; + + @SuppressWarnings("rawtypes") + @Autowired + private DSpaceObjectService dsoService; + + private Context ctx; + + private boolean isSupportedUUIDIterator; + + public UUIDIterator(Context ctx, Iterator i, Class entityTypeClass) throws SQLException { + this.ctx = ctx; + + this.entityTypeClass = entityTypeClass; + isSupportedUUIDIterator = DSpaceObject.class.isAssignableFrom(this.entityTypeClass); + + if (isSupportedUUIDIterator) { + while (i.hasNext()) { + DSpaceObject dso = (DSpaceObject) i.next(); + if (dsoService == null) { + dsoService = ContentServiceFactory.getInstance().getDSpaceObjectService(dso); + } + cachedUUIDs.add(dso.getID()); + } + uuidIterator = cachedUUIDs.iterator(); + } else { + iterator = i; + } + + } + + public UUIDIterator(Context ctx, Collection collection, Class entityTypeClass) throws SQLException { + this.ctx = ctx; + + this.entityTypeClass = entityTypeClass; + isSupportedUUIDIterator = DSpaceObject.class.isAssignableFrom(this.entityTypeClass); + + if (isSupportedUUIDIterator) { + for (T obj : collection) { + DSpaceObject dso = (DSpaceObject) obj; + if (dsoService == null) { + dsoService = ContentServiceFactory.getInstance().getDSpaceObjectService(dso); + } + cachedUUIDs.add(dso.getID()); + } + uuidIterator = cachedUUIDs.iterator(); + } else { + throw new UnsupportedOperationException("Cannot generate iterator for this collection"); + } + + } + + @SuppressWarnings("unchecked") + @Override + protected T computeNext() { + try { + if (isSupportedUUIDIterator) { + return uuidIterator.hasNext() ? (T) dsoService.find(ctx, uuidIterator.next()) : endOfData(); + } else { + return iterator.hasNext() ? (T) iterator.next() : endOfData(); + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/curate/Curation.java b/dspace-api/src/main/java/org/dspace/curate/Curation.java index b3af072a32cd..e5cbfce4210c 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curation.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curation.java @@ -175,7 +175,9 @@ private long runQueue(TaskQueue queue, Curator curator) throws SQLException, Aut * @throws SQLException If DSpace contextx can't complete */ private void endScript(long timeRun) throws SQLException { - context.complete(); + if (context.isValid()) { + context.complete(); + } if (verbose) { long elapsed = System.currentTimeMillis() - timeRun; this.handler.logInfo("Ending curation. 
Elapsed time: " + elapsed); diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index 4076fab51989..696b7078c460 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -28,6 +28,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.core.UUIDIterator; import org.dspace.core.factory.CoreServiceFactory; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; @@ -88,7 +89,7 @@ public static enum TxScope { protected Appendable reporter = null; protected Invoked iMode = null; protected TaskResolver resolver = new TaskResolver(); - protected TxScope txScope = TxScope.OPEN; + protected TxScope txScope = TxScope.OBJECT; protected CommunityService communityService; protected ItemService itemService; protected HandleService handleService; @@ -258,7 +259,7 @@ public void curate(Context c, String id) throws IOException { } // if curation scoped, commit transaction if (txScope.equals(TxScope.CURATION)) { - Context ctx = curationCtx.get(); + Context ctx = curationContext(); if (ctx != null) { ctx.complete(); } @@ -275,8 +276,9 @@ public void curate(Context c, String id) throws IOException { * (Community, Collection or Item). * @param dso the DSpace object * @throws IOException if IO error + * @throws SQLException */ - public void curate(DSpaceObject dso) throws IOException { + public void curate(DSpaceObject dso) throws IOException, SQLException { if (dso == null) { throw new IOException("Cannot perform curation task(s) on a null DSpaceObject!"); } @@ -307,9 +309,10 @@ public void curate(DSpaceObject dso) throws IOException { * @param c session context in which curation takes place. * @param dso the single object to be curated. * @throws java.io.IOException passed through. 
+ * @throws SQLException */ public void curate(Context c, DSpaceObject dso) - throws IOException { + throws IOException, SQLException { curationCtx.set(c); curate(dso); } @@ -461,8 +464,10 @@ protected boolean doSite(TaskRunner tr, Site site) throws IOException { //Then, perform this task for all Top-Level Communities in the Site // (this will recursively perform task for all objects in DSpace) - for (Community subcomm : communityService.findAllTop(ctx)) { - if (!doCommunity(tr, subcomm)) { + Iterator iterator = new UUIDIterator(ctx, communityService.findAllTop(ctx), + Community.class); + while (iterator.hasNext()) { + if (!doCommunity(tr, iterator.next())) { return false; } } @@ -480,18 +485,26 @@ protected boolean doSite(TaskRunner tr, Site site) throws IOException { * @param comm Community * @return true if successful, false otherwise * @throws IOException if IO error + * @throws SQLException */ - protected boolean doCommunity(TaskRunner tr, Community comm) throws IOException { + protected boolean doCommunity(TaskRunner tr, Community comm) throws IOException, SQLException { if (!tr.run(comm)) { return false; } - for (Community subcomm : comm.getSubcommunities()) { - if (!doCommunity(tr, subcomm)) { + UUIDIterator subComIter = new UUIDIterator(curationContext(), comm.getSubcommunities(), + Community.class); + UUIDIterator collectionsIter = new UUIDIterator(curationContext(), + comm.getCollections(), + Collection.class); + + while (subComIter.hasNext()) { + if (!doCommunity(tr, subComIter.next())) { return false; } } - for (Collection coll : comm.getCollections()) { - if (!doCollection(tr, coll)) { + + while (collectionsIter.hasNext()) { + if (!doCollection(tr, collectionsIter.next())) { return false; } } @@ -517,6 +530,9 @@ protected boolean doCollection(TaskRunner tr, Collection coll) throws IOExceptio Item item = iter.next(); boolean shouldContinue = tr.run(item); context.uncacheEntity(item); + if (txScope.equals(TxScope.OBJECT) && context.isValid()) { + context.commit(); + } if (!shouldContinue) { return false; } @@ -533,13 +549,12 @@ protected boolean doCollection(TaskRunner tr, Collection coll) throws IOExceptio * * @param dso the DSpace object * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. 
+ * @throws SQLException */ - protected void visit(DSpaceObject dso) throws IOException { - Context curCtx = curationCtx.get(); - if (curCtx != null) { - if (txScope.equals(TxScope.OBJECT)) { - curCtx.dispatchEvents(); - } + protected void visit(DSpaceObject dso) throws IOException, SQLException { + Context curCtx = curationContext(); + if (curCtx != null && txScope.equals(TxScope.OBJECT)) { + curCtx.dispatchEvents(); } } @@ -552,7 +567,7 @@ public TaskRunner(ResolvedTask task) { this.task = task; } - public boolean run(DSpaceObject dso) throws IOException { + public boolean run(DSpaceObject dso) throws IOException, SQLException { try { if (dso == null) { throw new IOException("DSpaceObject is null"); @@ -562,14 +577,14 @@ public boolean run(DSpaceObject dso) throws IOException { logInfo(logMessage(id)); visit(dso); return !suspend(statusCode); - } catch (IOException ioe) { + } catch (IOException | SQLException e) { //log error & pass exception upwards - System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe); - throw ioe; + System.out.println("Error executing curation task '" + task.getName() + "'; " + e); + throw e; } } - public boolean run(Context c, String id) throws IOException { + public boolean run(Context c, String id) throws IOException, SQLException { try { if (c == null || id == null) { throw new IOException("Context or identifier is null"); diff --git a/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java index 88610ea95943..480604e9393e 100644 --- a/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java +++ b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java @@ -9,8 +9,6 @@ import static org.junit.Assert.assertEquals; -import java.io.IOException; - import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; @@ -40,7 +38,7 @@ public class CreateMissingIdentifiersIT @Test public void testPerform() - throws IOException { + throws Exception { // Must remove any cached named plugins before creating a new one CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses(); ConfigurationService configurationService = kernelImpl.getConfigurationService(); From 2f934a87b1e556d78e8c9f024ffd5cc6f739dd0d Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 17:44:18 +0100 Subject: [PATCH 528/686] [DSC-1229] Fixes DiscoveryRestControllerMultiLanguageIT --- .../config/spring/api/test-discovery.xml | 1 + ...iscoveryRestControllerMultiLanguageIT.java | 28 ++++++++++++++----- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml index 95515815ef68..e52efa5e5401 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -3736,6 +3736,7 @@ + diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java index 08118f6e7fcf..91414847b7e1 100644 --- 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java @@ -298,7 +298,10 @@ public void discoverFacetsTypesTest() throws Exception { .withTitle("Test 1") .withIssueDate("2010-10-17") .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA", "srsc:SCB14") + .withType( + "Resource Types::text::journal::journal article::software paper", + "publication-coar-types:c_7bab" + ) .build(); context.restoreAuthSystemState(); @@ -306,24 +309,35 @@ public void discoverFacetsTypesTest() throws Exception { getClient().perform(get("/api/discover/facets/types") .header("Accept-Language", Locale.ITALIAN.getLanguage()) .param("configuration", "multilanguage-types") - .param("prefix", "matem")) + .param("prefix", "art")) .andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$.name", is("types"))) .andExpect(jsonPath("$.facetType", is("text"))) .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("MATEMATICA","srsc:SCB14")))); + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.entryTypes( + "articolo sul software","publication-coar-types:c_7bab" + ) + ) + )); getClient().perform(get("/api/discover/facets/types") .header("Accept-Language", "uk") .param("configuration", "multilanguage-types") - .param("prefix", "мат")) + .param("prefix", "про")) .andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$.name", is("types"))) .andExpect(jsonPath("$.facetType", is("text"))) .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("МАТЕМАТИКА","srsc:SCB14")))); + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.entryTypes( + "програмна стаття", + "publication-coar-types:c_7bab" + ) + ) + )); } From 37f909e1a2503605d2b89c8939609792103b2d05 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 18:06:59 +0100 Subject: [PATCH 529/686] [DSC-1229] Fixes DiscoveryScopeBasedRestControllerIT --- .../app/rest/DiscoveryScopeBasedRestControllerIT.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java index 90bd209a4598..18f949fe9e41 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -502,7 +502,7 @@ public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { .andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( - FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), FacetEntryMatcher.authorFacet(false), @@ -618,7 +618,7 @@ public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { 
.andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( - FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), FacetEntryMatcher.authorFacet(false), @@ -672,7 +672,7 @@ public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { .andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( - FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), FacetEntryMatcher.anyFacet("graphitemtype", "chart.pie"), FacetEntryMatcher.anyFacet("graphpubldate", "chart.bar"), FacetEntryMatcher.authorFacet(false), From d9233e1df36b82b42daa1f682cb67cf039a20523 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 18:10:44 +0100 Subject: [PATCH 530/686] [DSC-1229] Fixes DiscoveryRelatedBoxComponentIT --- .../org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java index 9d9323483549..9902bfa24797 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRelatedBoxComponentIT.java @@ -247,7 +247,7 @@ public void discoverSearchObjectsTestWithScope() throws Exception { FacetEntryMatcher.anyFacet("editor", "text"), FacetEntryMatcher.anyFacet("organization", "text"), FacetEntryMatcher.anyFacet("funding", "text"), - FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), FacetEntryMatcher.subjectFacet(false), FacetEntryMatcher.dateIssuedFacet(false), FacetEntryMatcher.hasContentInOriginalBundleFacet(false) @@ -287,7 +287,7 @@ public void discoverSearchObjectsTestWithScope() throws Exception { FacetEntryMatcher.anyFacet("editor", "text"), FacetEntryMatcher.anyFacet("organization", "text"), FacetEntryMatcher.anyFacet("funding", "text"), - FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), FacetEntryMatcher.subjectFacet(false), FacetEntryMatcher.dateIssuedFacet(false), FacetEntryMatcher.hasContentInOriginalBundleFacet(false) @@ -329,7 +329,7 @@ public void discoverSearchObjectsTestWithScope() throws Exception { FacetEntryMatcher.anyFacet("editor", "text"), FacetEntryMatcher.anyFacet("organization", "text"), FacetEntryMatcher.anyFacet("funding", "text"), - FacetEntryMatcher.anyFacet("itemtype", "text"), + FacetEntryMatcher.anyFacet("itemtype", "hierarchical"), FacetEntryMatcher.subjectFacet(false), FacetEntryMatcher.dateIssuedFacet(false), FacetEntryMatcher.hasContentInOriginalBundleFacet(false) From db082d5f62e16f1af11a3b12b70b8c4f45f041ce Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 18:17:50 +0100 Subject: [PATCH 531/686] [DSC-1229] Fixes BulkAccessControlScriptIT --- .../dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java
index da0f90ca97c7..3eb8eab626d4 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java
@@ -496,7 +496,7 @@ public void bulkAccessScriptWithNormalUserTest() throws Exception {
                 multipart(CURATE_SCRIPT_ENDPOINT)
                     .file(bitstreamFile)
                     .param("properties", new ObjectMapper().writeValueAsString(list)))
-                .andExpect(status().isForbidden());
+                .andExpect(status().isAccepted());
         }
     }
 
From 96345ce820e5b93c7e9062e2e7a38fac147572a4 Mon Sep 17 00:00:00 2001
From: Vincenzo Mecca 
Date: Wed, 8 Nov 2023 18:40:11 +0100
Subject: [PATCH 532/686] [DSC-1229] Fixes CurationScriptIT

---
 .../script/BulkItemExportScriptConfiguration.java | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/BulkItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/BulkItemExportScriptConfiguration.java
index 717d62b3df92..168e720e581b 100644
--- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/BulkItemExportScriptConfiguration.java
+++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/BulkItemExportScriptConfiguration.java
@@ -8,12 +8,14 @@
 package org.dspace.content.integration.crosswalks.script;
 
 import java.sql.SQLException;
+import java.util.List;
 import java.util.Optional;
 
 import org.apache.commons.cli.Options;
 import org.dspace.authorize.factory.AuthorizeServiceFactory;
 import org.dspace.authorize.service.AuthorizeService;
 import org.dspace.core.Context;
+import org.dspace.scripts.DSpaceCommandLineParameter;
 import org.dspace.scripts.configuration.ScriptConfiguration;
 import org.dspace.services.ConfigurationService;
 import org.dspace.utils.DSpace;
@@ -29,6 +31,11 @@ public class BulkItemExportScriptConfiguration extends
 
     private Class dspaceRunnableClass;
 
+    @Override
+    public boolean isAllowedToExecute(Context context, List commandLineParameters) {
+        return this.isAllowedToExecute(context);
+    }
+
     @Override
     public boolean isAllowedToExecute(Context context) {
         StringBuilder property = new StringBuilder("bulk-export.limit.");

From fea366f8c0e0d3099adeaec698a5ef61248567f5 Mon Sep 17 00:00:00 2001
From: Vincenzo Mecca 
Date: Wed, 8 Nov 2023 18:40:26 +0100
Subject: [PATCH 533/686] [DSC-1229] Fixes BulkAccessControlScriptIT

---
 .../crosswalks/script/ItemExportScriptConfiguration.java | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExportScriptConfiguration.java
index 22f65cb3b907..ff22414e0596 100644
--- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExportScriptConfiguration.java
+++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/script/ItemExportScriptConfiguration.java
@@ -7,8 +7,11 @@
  */
 package org.dspace.content.integration.crosswalks.script;
 
+import java.util.List;
+
 import org.apache.commons.cli.Options;
 import org.dspace.core.Context;
+import org.dspace.scripts.DSpaceCommandLineParameter;
 import
org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -22,6 +25,11 @@ public class ItemExportScriptConfiguration extends ScriptC private Class dspaceRunnableClass; + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + return this.isAllowedToExecute(context); + } + @Override public boolean isAllowedToExecute(Context context) { return true; From 2f7ec20f1687fcf1257a0f3f80e8b20a75e51183 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 8 Nov 2023 18:40:35 +0100 Subject: [PATCH 534/686] [DSC-1229] Fixes ScriptRestRepositoryIT --- .../org/dspace/app/rest/repository/ScriptRestRepository.java | 2 +- .../test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java index 8570c7096f3c..6cf8295898cd 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java @@ -101,7 +101,7 @@ public ProcessRest startProcess(Context context, String scriptName, List Date: Wed, 8 Nov 2023 21:25:50 +0100 Subject: [PATCH 535/686] [DSC-1229] Fixes BulkAccessControlScriptIT --- .../dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java index 3eb8eab626d4..da0f90ca97c7 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptIT.java @@ -496,7 +496,7 @@ public void bulkAccessScriptWithNormalUserTest() throws Exception { multipart(CURATE_SCRIPT_ENDPOINT) .file(bitstreamFile) .param("properties", new ObjectMapper().writeValueAsString(list))) - .andExpect(status().isAccepted()); + .andExpect(status().isForbidden()); } } From c1a48e551254a3e6677d9700c1bc7a9902fe62db Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Fri, 15 Sep 2023 14:22:56 +0100 Subject: [PATCH 536/686] XmlWorkflowCuratorServiceImpl: add check to queue task if configured; Curation: remove obsolete code preventing curation running on workflow tasks as #3157 is now implemented --- .../main/java/org/dspace/curate/Curation.java | 13 +--- .../curate/XmlWorkflowCuratorServiceImpl.java | 77 +++++++++++-------- 2 files changed, 46 insertions(+), 44 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/curate/Curation.java b/dspace-api/src/main/java/org/dspace/curate/Curation.java index b3af072a32cd..4d70286e79e0 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curation.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curation.java @@ -152,17 +152,10 @@ private long runQueue(TaskQueue queue, Curator curator) throws SQLException, Aut super.handler.logInfo("Curating id: " + entry.getObjectId()); } curator.clear(); - // does entry relate to a DSO or workflow object? 
- if (entry.getObjectId().indexOf('/') > 0) { - for (String taskName : entry.getTaskNames()) { - curator.addTask(taskName); - } - curator.curate(context, entry.getObjectId()); - } else { - // TODO: Remove this exception once curation tasks are supported by configurable workflow - // e.g. see https://github.com/DSpace/DSpace/pull/3157 - throw new IllegalArgumentException("curation for workflow items is no longer supported"); + for (String taskName : entry.getTaskNames()) { + curator.addTask(taskName); } + curator.curate(context, entry.getObjectId()); } queue.release(this.queue, ticket, true); return ticket; diff --git a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java index 70a36f278ed1..27a162d543c2 100644 --- a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import java.util.List; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; @@ -139,40 +140,48 @@ public boolean curate(Curator curator, Context c, XmlWorkflowItem wfi) item.setOwningCollection(wfi.getCollection()); for (Task task : step.tasks) { curator.addTask(task.name); - curator.curate(c, item); - int status = curator.getStatus(task.name); - String result = curator.getResult(task.name); - String action = "none"; - switch (status) { - case Curator.CURATE_FAIL: - // task failed - notify any contacts the task has assigned - if (task.powers.contains("reject")) { - action = "reject"; - } - notifyContacts(c, wfi, task, "fail", action, result); - // if task so empowered, reject submission and terminate - if ("reject".equals(action)) { - workflowService.sendWorkflowItemBackSubmission(c, wfi, - c.getCurrentUser(), null, - task.name + ": " + result); - return false; - } - break; - case Curator.CURATE_SUCCESS: - if (task.powers.contains("approve")) { - action = "approve"; - } - notifyContacts(c, wfi, task, "success", action, result); - if ("approve".equals(action)) { - // cease further task processing and advance submission - return true; - } - break; - case Curator.CURATE_ERROR: - notifyContacts(c, wfi, task, "error", action, result); - break; - default: - break; + + // Check whether the task is configured to be queued rather than automatically run + if (StringUtils.isNotEmpty(step.queue)) { + // queue attribute has been set in the FlowStep configuration: add task to configured queue + curator.queue(c, item.getID().toString(), step.queue); + } else { + // Task is configured to be run automatically + curator.curate(c, item); + int status = curator.getStatus(task.name); + String result = curator.getResult(task.name); + String action = "none"; + switch (status) { + case Curator.CURATE_FAIL: + // task failed - notify any contacts the task has assigned + if (task.powers.contains("reject")) { + action = "reject"; + } + notifyContacts(c, wfi, task, "fail", action, result); + // if task so empowered, reject submission and terminate + if ("reject".equals(action)) { + workflowService.sendWorkflowItemBackSubmission(c, wfi, + c.getCurrentUser(), null, + task.name + ": " + result); + return false; + } + break; + case Curator.CURATE_SUCCESS: + if (task.powers.contains("approve")) { + action = "approve"; + } + notifyContacts(c, wfi, task, "success", action, result); + 
if ("approve".equals(action)) { + // cease further task processing and advance submission + return true; + } + break; + case Curator.CURATE_ERROR: + notifyContacts(c, wfi, task, "error", action, result); + break; + default: + break; + } } curator.clear(); } From 0172cf6508ba85c3efbe627d811a5a45aabd1baf Mon Sep 17 00:00:00 2001 From: wwuck Date: Thu, 26 Oct 2023 23:16:29 +1100 Subject: [PATCH 537/686] Return both user and operational LDAP attributes Explicitly request both user and operation attributes for LDAP group search as the default searching does not include operational attributes. This is required to fetch the memberOf attribute when checking LDAP group membership. Fixes #9151 (cherry picked from commit 56b7cbf4dbcc4a1ec201518f291c119470cc4e93) --- .../java/org/dspace/authenticate/LDAPAuthentication.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java index aced16876db0..585eaf9cd8b1 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java @@ -494,6 +494,8 @@ protected String getDNOfUser(String adminUser, String adminPassword, Context con try { SearchControls ctrls = new SearchControls(); ctrls.setSearchScope(ldap_search_scope_value); + // Fetch both user attributes '*' (eg. uid, cn) and operational attributes '+' (eg. memberOf) + ctrls.setReturningAttributes(new String[] {"*", "+"}); String searchName; if (useTLS) { @@ -700,13 +702,13 @@ public String getName() { /* * Add authenticated users to the group defined in dspace.cfg by * the authentication-ldap.login.groupmap.* key. - * + * * @param dn * The string containing distinguished name of the user - * + * * @param group * List of strings with LDAP dn of groups - * + * * @param context * DSpace context */ From f687063588d3ea722750909b5a7bd1d2a846c5ed Mon Sep 17 00:00:00 2001 From: Alexander K Date: Thu, 9 Nov 2023 10:43:15 +0100 Subject: [PATCH 538/686] [DSC-1189] change pattern for get thumbnails --- .../src/main/java/org/dspace/content/BitstreamServiceImpl.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index b07f23ee23ff..0b7ce4b097cf 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -416,7 +416,8 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); + Pattern pattern = Pattern.compile("^" + + (bitstream.getName() != null ? 
Pattern.quote(bitstream.getName()) : bitstream.getName()) + ".([^.]+)$");
 
         for (Bundle bundle : bitstream.getBundles()) {
             for (Item item : bundle.getItems()) {

From 4b2957358fad2c410b727c433972867f81db0112 Mon Sep 17 00:00:00 2001
From: mohamed eskander 
Date: Wed, 8 Nov 2023 15:41:22 +0200
Subject: [PATCH 539/686] [DURACOM-200] improvement of checker script

(cherry picked from commit 5a7c7956cd4e8b47f6a6f53adbc646adeddb0f88)
---
 .../java/org/dspace/content/dao/impl/BitstreamDAOImpl.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java
index d6d77fe7f0c7..0e051625aaee 100644
--- a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java
+++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java
@@ -68,9 +68,9 @@ public List findDuplicateInternalIdentifier(Context context, Bitstrea
 
     @Override
     public List findBitstreamsWithNoRecentChecksum(Context context) throws SQLException {
-        Query query = createQuery(context,
-                "select b from Bitstream b where b not in (select c.bitstream from " +
-                "MostRecentChecksum c)");
+        Query query = createQuery(context, "SELECT b FROM MostRecentChecksum c RIGHT JOIN Bitstream b " +
+            "ON c.bitstream = b WHERE c IS NULL" );
+
         return query.getResultList();
     }
 
From adf644535c313c66b306340691eab3237811ac89 Mon Sep 17 00:00:00 2001
From: Jens Vannerum 
Date: Wed, 8 Nov 2023 12:02:37 +0100
Subject: [PATCH 540/686] 108055: isClosed method should use xml configuration

(cherry picked from commit d800d800d5346ea9a526ba2a880fc93a6892da98)
---
 .../app/rest/converter/SubmissionFormConverter.java | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java
index 4febcd559479..daea935f53ff 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java
@@ -125,7 +125,7 @@ private SubmissionFormFieldRest getField(DCInput dcinput, String formName) {
                                                        dcinput.getVocabulary()));
                 selMd.setClosed(
                     isClosed(dcinput.getSchema(), dcinput.getElement(), dcinput.getQualifier(),
-                        dcinput.getPairsType(), dcinput.getVocabulary()));
+                        dcinput.getPairsType(), dcinput.getVocabulary(), dcinput.isClosedVocabulary()));
             } else {
                 inputRest.setType(inputType);
             }
@@ -145,7 +145,7 @@ private SubmissionFormFieldRest getField(DCInput dcinput, String formName) {
                     selMd.setControlledVocabulary(getAuthorityName(dcinput.getSchema(), dcinput.getElement(),
                         pairs.get(idx + 1), dcinput.getPairsType(), dcinput.getVocabulary()));
                     selMd.setClosed(isClosed(dcinput.getSchema(), dcinput.getElement(),
-                        dcinput.getQualifier(), null, dcinput.getVocabulary()));
+                        dcinput.getQualifier(), null, dcinput.getVocabulary(), dcinput.isClosedVocabulary()));
                 }
                 selectableMetadata.add(selMd);
             }
@@ -212,9 +212,11 @@ private String getAuthorityName(String schema, String element, String qualifier,
     }
 
     private boolean isClosed(String schema, String element, String qualifier, String valuePairsName,
-                             String vocabularyName) {
-        if (StringUtils.isNotBlank(valuePairsName) || StringUtils.isNotBlank(vocabularyName)) {
+                             String vocabularyName, boolean isClosedVocabulary) {
+        if
(StringUtils.isNotBlank(valuePairsName)) { return true; + } else if (StringUtils.isNotBlank(vocabularyName)) { + return isClosedVocabulary; } return authorityUtils.isClosed(schema, element, qualifier); } From ec972ea2de410b395f5deec8b301b42f48b12197 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Tue, 17 Oct 2023 16:28:37 +0200 Subject: [PATCH 541/686] [DURACOM-192] Authentication Method related special groups are put in claim set even if a different authentication method is used (cherry picked from commit 6504d749b91508096300e4545069a0554eb5934b) --- .../authenticate/AuthenticationMethod.java | 16 ++++++++++++++++ .../authenticate/AuthenticationServiceImpl.java | 13 +++++++++---- .../dspace/authenticate/IPAuthentication.java | 5 +++++ 3 files changed, 30 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java index 274779e92877..500ee04a979b 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java @@ -153,6 +153,22 @@ public boolean allowSetPassword(Context context, public List getSpecialGroups(Context context, HttpServletRequest request) throws SQLException; + /** + * Returns true if the special groups returned by + * {@link org.dspace.authenticate.AuthenticationMethod#getSpecialGroups(Context, HttpServletRequest)} + * should be implicitly be added to the groups related to the current user. By + * default this is true if the authentication method is the actual + * authentication mechanism used by the user. + * @param context A valid DSpace context. + * @param request The request that started this operation, or null if not + * applicable. + * @return true is the special groups must be considered, false + * otherwise + */ + public default boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) { + return getName().equals(context.getAuthenticationMethod()); + } + /** * Authenticate the given or implicit credentials. 
* This is the heart of the authentication method: test the diff --git a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java index a9449b87d4e3..1d67da37ecb3 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java @@ -179,10 +179,15 @@ public List getSpecialGroups(Context context, int totalLen = 0; for (AuthenticationMethod method : getAuthenticationMethodStack()) { - List gl = method.getSpecialGroups(context, request); - if (gl.size() > 0) { - result.addAll(gl); - totalLen += gl.size(); + + if (method.areSpecialGroupsApplicable(context, request)) { + + List gl = method.getSpecialGroups(context, request); + if (gl.size() > 0) { + result.addAll(gl); + totalLen += gl.size(); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java index 3b2366034489..0c2be211a532 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java @@ -252,6 +252,11 @@ public List getSpecialGroups(Context context, HttpServletRequest request) return groups; } + @Override + public boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) { + return true; + } + @Override public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request) throws SQLException { From 911a7a394f22f68589ab7c213d83e12e205ecc56 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Wed, 18 Oct 2023 12:45:00 +0200 Subject: [PATCH 542/686] [DURACOM-192] Added test (cherry picked from commit fa39251071156a6eeb1030000f50761663e128e2) --- .../rest/AuthenticationRestControllerIT.java | 66 +++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java index 69e70dbb087e..1da807ad7180 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static java.lang.Thread.sleep; +import static org.dspace.app.rest.matcher.GroupMatcher.matchGroupWithName; import static org.dspace.app.rest.utils.RegexUtils.REGEX_UUID; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; @@ -1641,6 +1642,71 @@ public void testOrcidLoginURL() throws Exception { } } + @Test + public void testAreSpecialGroupsApplicable() throws Exception { + context.turnOffAuthorisationSystem(); + + GroupBuilder.createGroup(context) + .withName("specialGroupPwd") + .build(); + GroupBuilder.createGroup(context) + .withName("specialGroupShib") + .build(); + + configurationService.setProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod", SHIB_AND_PASS); + configurationService.setProperty("authentication-password.login.specialgroup", "specialGroupPwd"); + configurationService.setProperty("authentication-shibboleth.role.faculty", "specialGroupShib"); + configurationService.setProperty("authentication-shibboleth.default-roles", "faculty"); + + 
context.restoreAuthSystemState(); + + String passwordToken = getAuthToken(eperson.getEmail(), password); + + getClient(passwordToken).perform(get("/api/authn/status").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", is(true))) + .andExpect(jsonPath("$.authenticated", is(true))) + .andExpect(jsonPath("$.authenticationMethod", is("password"))) + .andExpect(jsonPath("$.type", is("status"))) + .andExpect(jsonPath("$._links.specialGroups.href", startsWith(REST_SERVER_URL))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder(matchGroupWithName("specialGroupPwd")))); + + getClient(passwordToken).perform(get("/api/authn/status/specialGroups").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder(matchGroupWithName("specialGroupPwd")))); + + String shibToken = getClient().perform(post("/api/authn/login") + .requestAttr("SHIB-MAIL", eperson.getEmail()) + .requestAttr("SHIB-SCOPED-AFFILIATION", "faculty;staff")) + .andExpect(status().isOk()) + .andReturn().getResponse().getHeader(AUTHORIZATION_HEADER).replace(AUTHORIZATION_TYPE, ""); + + getClient(shibToken).perform(get("/api/authn/status").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", is(true))) + .andExpect(jsonPath("$.authenticated", is(true))) + .andExpect(jsonPath("$.authenticationMethod", is("shibboleth"))) + .andExpect(jsonPath("$.type", is("status"))) + .andExpect(jsonPath("$._links.specialGroups.href", startsWith(REST_SERVER_URL))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder(matchGroupWithName("specialGroupShib")))); + + getClient(shibToken).perform(get("/api/authn/status/specialGroups").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder(matchGroupWithName("specialGroupShib")))); + } + // Get a short-lived token based on an active login token private String getShortLivedToken(String loginToken) throws Exception { ObjectMapper mapper = new ObjectMapper(); From fd13ee7cfdcdf0d40edf914b1ff21d880713cc91 Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Fri, 27 Oct 2023 15:50:26 +0200 Subject: [PATCH 543/686] 107891: Cache administrator group (cherry picked from commit 1e82ca7998c45bd628cd84cefce9ae3f0a0ce046) --- .../dspace/authorize/AuthorizeServiceImpl.java | 2 +- .../src/main/java/org/dspace/core/Context.java | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index fc438c234cda..5dffe5fdfc1f 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -451,7 +451,7 @@ public boolean isAdmin(Context c, EPerson e) 
throws SQLException { if (e == null) { return false; // anonymous users can't be admins.... } else { - return groupService.isMember(c, e, Group.ADMIN); + return groupService.isMember(c, e, c.getAdminGroup()); } } diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index 09b9c4a32dd3..02a3fee09f8a 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -128,6 +128,11 @@ public class Context implements AutoCloseable { private DBConnection dbConnection; + /** + * The default administrator group + */ + private Group adminGroup; + public enum Mode { READ_ONLY, READ_WRITE, @@ -960,4 +965,15 @@ public void setAuthenticationMethod(final String authenticationMethod) { public boolean isContextUserSwitched() { return currentUserPreviousState != null; } + + /** + * Returns the default "Administrator" group for DSpace administrators. + * The result is cached in the 'adminGroup' field, so it is only looked up once. + * This is done to improve performance, as this method is called quite often. + */ + public Group getAdminGroup() throws SQLException { + return (adminGroup == null) ? EPersonServiceFactory.getInstance() + .getGroupService() + .findByName(this, Group.ADMIN) : adminGroup; + } } From 5b57a4cc30e0ff4e328ad988efca9e282a42e663 Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Fri, 10 Nov 2023 16:37:23 +0200 Subject: [PATCH 544/686] [DSC-1321] add subscription.url config property --- dspace/config/dspace.cfg | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index d78479c2f23d..ffec78c450a2 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -47,6 +47,9 @@ dspace.name = DSpace at My University # Default language for metadata values default.language = en_US +# Url of subscriptions page +subscription.url = ${dspace.ui.url}/subscriptions + # Solr server/webapp. # DSpace uses Solr for all search/browse capability (and for usage statistics). # Since DSpace 7, SOLR must be installed as a stand-alone service. From 468c969661c1878be330ce990f810b681737e434 Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Fri, 10 Nov 2023 16:38:06 +0200 Subject: [PATCH 545/686] [DSC-1321] edit subscriptions_content email template --- dspace/config/emails/subscriptions_content | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/dspace/config/emails/subscriptions_content b/dspace/config/emails/subscriptions_content index fc186abbb0b9..6dfc6f88410f 100644 --- a/dspace/config/emails/subscriptions_content +++ b/dspace/config/emails/subscriptions_content @@ -1,19 +1,23 @@ ## E-mail sent to designated address about updates on subscribed items ## -## Parameters: {0} Collections updates -## {1} Communities updates -## {2} Items updates +## Parameters: {0} Link to subscriptions page +## {1} Collections updates block +## {2} Communities updates block +## {3} Entity updates block This email is sent from DSpace-CRIS based on the chosen subscription preferences. 
+You can manage your subscription preferences from ${params[0]} Communities ----------- -List of changed items : ${params[0]} +${params[1]} + Collections ----------- -List of changed items : ${params[1]} +${params[2]} + -Items +Entities ----- -List of changed items : ${params[2]} \ No newline at end of file +${params[3]} \ No newline at end of file From c732d502625b5306076c4feba19a9aadcb6ab2e7 Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Fri, 10 Nov 2023 16:40:53 +0200 Subject: [PATCH 546/686] [DSC-1321] Add class SubscriptionItem to prevent memory issues --- .../subscriptions/SubscriptionItem.java | 74 +++++++++++++++++++ 1 file changed, 74 insertions(+) create mode 100644 dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionItem.java diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionItem.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionItem.java new file mode 100644 index 000000000000..3254635b015f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionItem.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions; + +import static java.util.stream.Collectors.toMap; + +import java.util.List; +import java.util.Map; + +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.discovery.IndexableObject; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + +public class SubscriptionItem { + + private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); + + private String name; + private String url; + private Map itemUrlsByItemName; + + public SubscriptionItem(String name, String url, Map itemUrlsByItemName) { + this.name = name; + this.url = url; + this.itemUrlsByItemName = itemUrlsByItemName; + } + + @SuppressWarnings({ "rawtypes" }) + static SubscriptionItem fromItem(DSpaceObject dSpaceObject, List relatedItems) { + return new SubscriptionItem( + dSpaceObject.getName(), + buildUrlForItem(dSpaceObject.getHandle()), + relatedItems.stream() + .map(obj -> (Item) obj.getIndexedObject()) + .collect(toMap(Item::getName, item -> buildUrlForItem(item.getHandle()))) + ); + } + + private static String buildUrlForItem(String handle) { + return configurationService.getProperty("dspace.ui.url") + "/handle/" + handle; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public Map getItemUrlsByItemName() { + return itemUrlsByItemName; + } + + public void setItemUrlsByItemName(Map itemUrlsByItemName) { + this.itemUrlsByItemName = itemUrlsByItemName; + } +} From 0e99a192439a977fc9fa1abacd0978e9f760cf7f Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Fri, 10 Nov 2023 16:41:00 +0200 Subject: [PATCH 547/686] [DSC-1321] refactor and change logic of SubscriptionEmailNotificationServiceImpl --- .../SubscriptionEmailNotificationService.java | 4 +- ...scriptionEmailNotificationServiceImpl.java | 199 ++++++++++++------ 2 files changed, 131 insertions(+), 72 deletions(-) diff --git 
a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java index 95272235095a..7a7c36491278 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java @@ -7,7 +7,7 @@ */ package org.dspace.subscriptions; -import java.util.Set; +import java.util.List; import org.dspace.core.Context; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -32,6 +32,6 @@ public interface SubscriptionEmailNotificationService { /** * returns a set of supported SubscriptionTypes */ - public Set getSupportedSubscriptionTypes(); + public List getSupportedSubscriptionTypes(); } diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java index 2a30b89af3f5..78024bfdd640 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java @@ -7,10 +7,12 @@ */ package org.dspace.subscriptions; +import static org.dspace.content.Item.ANY; import static org.dspace.core.Constants.COLLECTION; import static org.dspace.core.Constants.COMMUNITY; import static org.dspace.core.Constants.ITEM; import static org.dspace.core.Constants.READ; +import static org.dspace.subscriptions.SubscriptionItem.fromItem; import java.sql.SQLException; import java.util.ArrayList; @@ -18,16 +20,18 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.stream.Collectors; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.app.metrics.CrisMetrics; +import org.dspace.app.metrics.service.CrisMetricsService; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.eperson.EPerson; @@ -36,7 +40,6 @@ import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.subscriptions.service.DSpaceObjectUpdates; -import org.dspace.subscriptions.service.SubscriptionGenerator; import org.springframework.beans.factory.annotation.Autowired; /** @@ -48,87 +51,114 @@ public class SubscriptionEmailNotificationServiceImpl implements SubscriptionEma private static final Logger log = LogManager.getLogger(SubscriptionEmailNotificationServiceImpl.class); - private Map contentUpdates = new HashMap<>(); - @SuppressWarnings("rawtypes") - private Map subscriptionType2generators = new HashMap<>(); + private final Map contentUpdates; + private final ContentGenerator contentGenerator; + private final StatisticsGenerator statisticsGenerator; + private final List supportedSubscriptionTypes; @Autowired private AuthorizeService authorizeService; @Autowired private SubscribeService subscribeService; + @Autowired + private CrisMetricsService crisMetricsService; - @SuppressWarnings("rawtypes") public SubscriptionEmailNotificationServiceImpl(Map contentUpdates, - Map 
subscriptionType2generators) { + ContentGenerator contentGenerator, + StatisticsGenerator statisticsGenerator, + List supportedSubscriptionTypes) { this.contentUpdates = contentUpdates; - this.subscriptionType2generators = subscriptionType2generators; + this.contentGenerator = contentGenerator; + this.statisticsGenerator = statisticsGenerator; + this.supportedSubscriptionTypes = supportedSubscriptionTypes; } - @SuppressWarnings({ "rawtypes", "unchecked" }) public void perform(Context context, DSpaceRunnableHandler handler, String subscriptionType, String frequency) { - List communityItems = new ArrayList<>(); - List collectionsItems = new ArrayList<>(); - List items = new ArrayList<>(); + // Verify if subscriptionType is "content" or "subscription" + if (supportedSubscriptionTypes.get(0).equals(subscriptionType)) { + performForContent(context, handler, subscriptionType, frequency); + } else if (supportedSubscriptionTypes.get(1).equals(subscriptionType)) { + performForStatistics(context, subscriptionType, frequency); + } else { + throw new IllegalArgumentException( + "Currently this SubscriptionType:" + subscriptionType + " is not supported!"); + } + } + + @SuppressWarnings({ "rawtypes" }) + private void performForContent(Context context, DSpaceRunnableHandler handler, + String subscriptionType, String frequency) { try { List subscriptions = - findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, frequency); - // Here is verified if SubscriptionType is "content" Or "statistics" as them are configured - if (subscriptionType2generators.keySet().contains(subscriptionType)) { - // the list of the person who has subscribed - int iterator = 0; - for (Subscription subscription : subscriptions) { - DSpaceObject dSpaceObject = subscription.getDSpaceObject(); - EPerson ePerson = subscription.getEPerson(); - - if (!authorizeService.authorizeActionBoolean(context, ePerson, dSpaceObject, READ, true)) { - iterator++; - continue; - } + findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, frequency); + List communityItems = new ArrayList<>(); + List collectionsItems = new ArrayList<>(); + Map> entityItemsByEntityType = new HashMap<>(); + int iterator = 0; - if (dSpaceObject.getType() == COMMUNITY) { - List indexableCommunityItems = contentUpdates - .get(Community.class.getSimpleName().toLowerCase()) - .findUpdates(context, dSpaceObject, frequency); - communityItems.addAll(getItems(context, ePerson, indexableCommunityItems)); - } else if (dSpaceObject.getType() == COLLECTION) { - List indexableCollectionItems = contentUpdates - .get(Collection.class.getSimpleName().toLowerCase()) - .findUpdates(context, dSpaceObject, frequency); - collectionsItems.addAll(getItems(context, ePerson, indexableCollectionItems)); - } else if (dSpaceObject.getType() == ITEM) { - List indexableCollectionItems = contentUpdates - .get(Item.class.getSimpleName().toLowerCase()) - .findUpdates(context, dSpaceObject, frequency); - items.addAll(getItems(context, ePerson, indexableCollectionItems)); - } else { + for (Subscription subscription : subscriptions) { + DSpaceObject dSpaceObject = subscription.getDSpaceObject(); + EPerson ePerson = subscription.getEPerson(); + + if (!authorizeService.authorizeActionBoolean(context, ePerson, dSpaceObject, READ, true)) { + iterator++; + continue; + } + + switch (dSpaceObject.getType()) { + case COMMUNITY: + List indexableCommunityItems = getItems( + context, ePerson, + contentUpdates.get(Community.class.getSimpleName().toLowerCase()) + 
.findUpdates(context, dSpaceObject, frequency) + ); + communityItems.add(fromItem(dSpaceObject, indexableCommunityItems)); + break; + case COLLECTION: + List indexableCollectionItems = getItems( + context, ePerson, + contentUpdates.get(Collection.class.getSimpleName().toLowerCase()) + .findUpdates(context, dSpaceObject, frequency) + ); + collectionsItems.add(fromItem(dSpaceObject, indexableCollectionItems)); + break; + case ITEM: + List indexableEntityItems = getItems( + context, ePerson, contentUpdates.get(Item.class.getSimpleName().toLowerCase()) + .findUpdates(context, dSpaceObject, frequency) + ); + String dspaceType = ContentServiceFactory + .getInstance().getDSpaceObjectService(dSpaceObject) + .getMetadataFirstValue(dSpaceObject, "dspace", "entity", "type", ANY); + + entityItemsByEntityType.computeIfAbsent(dspaceType, k -> new ArrayList<>()) + .add(fromItem(dSpaceObject, indexableEntityItems)); + break; + default: log.warn("found an invalid DSpace Object type ({}) among subscriptions to send", dSpaceObject.getType()); continue; - } + } - if (iterator < subscriptions.size() - 1) { - // as the subscriptions are ordered by eperson id, so we send them by ePerson - if (ePerson.equals(subscriptions.get(iterator + 1).getEPerson())) { - iterator++; - continue; - } else { - subscriptionType2generators.get(subscriptionType) - .notifyForSubscriptions(context, ePerson, communityItems, - collectionsItems, items); - communityItems.clear(); - collectionsItems.clear(); - } + if (iterator < subscriptions.size() - 1) { + // as the subscriptions are ordered by eperson id, so we send them by ePerson + if (ePerson.equals(subscriptions.get(iterator + 1).getEPerson())) { + iterator++; + continue; } else { - //in the end of the iteration - subscriptionType2generators.get(subscriptionType) - .notifyForSubscriptions(context, ePerson, communityItems, - collectionsItems, items); + contentGenerator.notifyForSubscriptions( + ePerson, communityItems, collectionsItems, entityItemsByEntityType + ); + communityItems.clear(); + collectionsItems.clear(); } - iterator++; + } else { + //in the end of the iteration + contentGenerator.notifyForSubscriptions( + ePerson, communityItems, collectionsItems, entityItemsByEntityType + ); } - } else { - throw new IllegalArgumentException("Currently this SubscriptionType:" + subscriptionType + - " is not supported!"); + iterator++; } } catch (Exception e) { log.error(e.getMessage(), e); @@ -137,14 +167,43 @@ public void perform(Context context, DSpaceRunnableHandler handler, String subsc } } + private void performForStatistics(Context context, String subscriptionType, String frequency) { + List subscriptions = + findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, frequency); + List crisMetricsList = new ArrayList<>(); + int iterator = 0; + + for (Subscription subscription : subscriptions) { + EPerson ePerson = subscription.getEPerson(); + DSpaceObject dSpaceObject = subscription.getDSpaceObject(); + try { + crisMetricsList.addAll(crisMetricsService.findAllByDSO(context, dSpaceObject)); + } catch (Exception e) { + log.error(e.getMessage()); + } + if (iterator < subscriptions.size() - 1) { + if (ePerson.equals(subscriptions.get(iterator + 1).getEPerson())) { + iterator++; + continue; + } else { + statisticsGenerator.notifyForSubscriptions(context, ePerson, crisMetricsList); + } + } else { + //in the end of the iteration + statisticsGenerator.notifyForSubscriptions(context, ePerson, crisMetricsList); + } + iterator++; + } + } + @SuppressWarnings("rawtypes") 
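    /*
     * Illustrative sketch, not part of this patch: both performForContent() and
     * performForStatistics() batch notifications per EPerson using an index-based
     * look-ahead over the eperson-sorted subscription list returned by
     * findAllSubscriptionsBySubscriptionTypeAndFrequency(). An equivalent, arguably
     * easier to follow, formulation would group the subscriptions up front
     * (java.util.LinkedHashMap import assumed).
     */
    private Map<EPerson, List<Subscription>> groupByEPerson(List<Subscription> sortedSubscriptions) {
        Map<EPerson, List<Subscription>> subscriptionsByEPerson = new LinkedHashMap<>();
        for (Subscription subscription : sortedSubscriptions) {
            // preserves the eperson ordering of the input list
            subscriptionsByEPerson.computeIfAbsent(subscription.getEPerson(), k -> new ArrayList<>())
                                  .add(subscription);
        }
        return subscriptionsByEPerson;
    }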
private List getItems(Context context, EPerson ePerson, List indexableItems) throws SQLException { List items = new ArrayList(); - for (IndexableObject indexableitem : indexableItems) { - Item item = (Item) indexableitem.getIndexedObject(); + for (IndexableObject indexableItem : indexableItems) { + Item item = (Item) indexableItem.getIndexedObject(); if (authorizeService.authorizeActionBoolean(context, ePerson, item, READ, true)) { - items.add(indexableitem); + items.add(indexableItem); } } return items; @@ -157,25 +216,25 @@ private List getItems(Context context, EPerson ePerson, List findAllSubscriptionsBySubscriptionTypeAndFrequency(Context context, String subscriptionType, String frequency) { try { return subscribeService.findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, - frequency) + frequency) .stream() .sorted(Comparator.comparing(s -> s.getEPerson().getID())) .collect(Collectors.toList()); } catch (SQLException e) { log.error(e.getMessage(), e); } - return new ArrayList(); + return new ArrayList<>(); } @Override - public Set getSupportedSubscriptionTypes() { - return subscriptionType2generators.keySet(); + public List getSupportedSubscriptionTypes() { + return supportedSubscriptionTypes; } } From 9bfe6c2df1328da8d7358e880f02bb3bf57abf79 Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Fri, 10 Nov 2023 16:41:36 +0200 Subject: [PATCH 548/686] [DSC-1321] small chane in StatisticsGenerator --- .../org/dspace/subscriptions/StatisticsGenerator.java | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java index c1f9be368e27..842ff9aa0e8f 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java @@ -27,7 +27,6 @@ import org.dspace.core.Email; import org.dspace.eperson.EPerson; import org.dspace.services.ConfigurationService; -import org.dspace.subscriptions.service.SubscriptionGenerator; import org.springframework.beans.factory.annotation.Autowired; @@ -38,19 +37,16 @@ * * @author Alba Aliu */ -public class StatisticsGenerator implements SubscriptionGenerator { +public class StatisticsGenerator { private static final Logger log = LogManager.getLogger(StatisticsGenerator.class); @Autowired private ConfigurationService configurationService; - @Override - public void notifyForSubscriptions(Context c, EPerson ePerson, List crisMetricsList, - List crisMetricsList1, List crisMetricsList2) { - // find statistics for all the subscribed objects + public void notifyForSubscriptions(Context c, EPerson ePerson, List crisMetricsList) { try { // send the notification to the user - if (Objects.nonNull(ePerson) && crisMetricsList.size() > 0) { + if (Objects.nonNull(ePerson) && !crisMetricsList.isEmpty()) { Email email = new Email(); String name = configurationService.getProperty("dspace.name"); File attachment = generateExcel(crisMetricsList, c); From 3c8a912acc57ef99cba5ba9a9a79fcf8d39bbb4b Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Fri, 10 Nov 2023 16:42:11 +0200 Subject: [PATCH 549/686] [DSC-1321] Rewrite email composing in ContentGenerator --- .../subscriptions/ContentGenerator.java | 72 +++++++++---------- 1 file changed, 32 insertions(+), 40 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java 
b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java index f60ac3c98edb..e36e2c42c79b 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java @@ -11,27 +11,20 @@ import static org.apache.commons.lang.StringUtils.EMPTY; import java.io.ByteArrayOutputStream; -import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; -import java.util.Optional; -import javax.annotation.Resource; +import java.util.stream.Collectors; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.dspace.content.Item; -import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; -import org.dspace.discovery.IndexableObject; import org.dspace.eperson.EPerson; -import org.dspace.subscriptions.service.SubscriptionGenerator; -import org.springframework.beans.factory.annotation.Autowired; - +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; /** @@ -39,31 +32,30 @@ * which will handle the logic of sending the emails * in case of 'content' subscriptionType */ -@SuppressWarnings("rawtypes") -public class ContentGenerator implements SubscriptionGenerator { +public class ContentGenerator { private final Logger log = LogManager.getLogger(ContentGenerator.class); + private final ConfigurationService configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); - @SuppressWarnings("unchecked") - @Resource(name = "entityDissemination") - private Map entityType2Disseminator = new HashMap(); - - @Autowired - private ItemService itemService; - @Override - public void notifyForSubscriptions(Context context, EPerson ePerson, - List indexableComm, - List indexableColl, - List indexableItems) { + public void notifyForSubscriptions(EPerson ePerson, + List indexableComm, + List indexableColl, + Map> indexableEntityByType) { try { if (Objects.nonNull(ePerson)) { Locale supportedLocale = I18nUtil.getEPersonLocale(ePerson); Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "subscriptions_content")); email.addRecipient(ePerson.getEmail()); - email.addArgument(generateBodyMail(context, indexableComm)); - email.addArgument(generateBodyMail(context, indexableColl)); - email.addArgument(generateBodyMail(context, indexableItems)); + email.addArgument(configurationService.getProperty("subscription.url")); + email.addArgument(generateBodyMail("Community", indexableComm)); + email.addArgument(generateBodyMail("Collection", indexableColl)); + email.addArgument( + indexableEntityByType.entrySet().stream() + .map(entry -> generateBodyMail(entry.getKey(), entry.getValue())) + .collect(Collectors.joining("\n\n")) + ); email.send(); } } catch (Exception e) { @@ -72,18 +64,22 @@ public void notifyForSubscriptions(Context context, EPerson ePerson, } } - private String generateBodyMail(Context context, List indexableObjects) { + private String generateBodyMail(String type, List subscriptionItems) { try { ByteArrayOutputStream out = new ByteArrayOutputStream(); - out.write("\n".getBytes(UTF_8)); - if (indexableObjects.size() > 0) { - for (IndexableObject indexableObject : indexableObjects) { + if (!subscriptionItems.isEmpty()) { + out.write(("\nYou have " + 
subscriptionItems.size() + " subscription(s) active to type " + type + "\n") + .getBytes(UTF_8)); + for (SubscriptionItem item : subscriptionItems) { out.write("\n".getBytes(UTF_8)); - Item item = (Item) indexableObject.getIndexedObject(); - String entityType = itemService.getEntityTypeLabel(item); - Optional.ofNullable(entityType2Disseminator.get(entityType)) - .orElseGet(() -> entityType2Disseminator.get("Item")) - .disseminate(context, item, out); + out.write("List of new content for the\n".getBytes(UTF_8)); + out.write((type + " " + item.getName() + " - " + item.getUrl() + "\n") + .getBytes(UTF_8)); + + for (Entry entry : item.getItemUrlsByItemName().entrySet()) { + out.write("\n".getBytes(UTF_8)); + out.write((entry.getKey() + " - " + entry.getValue()).getBytes(UTF_8)); + } } return out.toString(); } else { @@ -96,8 +92,4 @@ private String generateBodyMail(Context context, List indexable return EMPTY; } - public void setEntityType2Disseminator(Map entityType2Disseminator) { - this.entityType2Disseminator = entityType2Disseminator; - } - } From e0f261fd55b9f9f6bff09b522d58b6441ae4da81 Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Fri, 10 Nov 2023 16:43:26 +0200 Subject: [PATCH 550/686] [DSC-1321] Remove now redundant interface SubscriptionGenerator since ContentGenerator and StatisticsGenerator have no common methods --- .../service/SubscriptionGenerator.java | 25 ------------------- 1 file changed, 25 deletions(-) delete mode 100644 dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java deleted file mode 100644 index 994ada75b61b..000000000000 --- a/dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java +++ /dev/null @@ -1,25 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.subscriptions.service; - -import java.util.List; - -import org.dspace.core.Context; -import org.dspace.eperson.EPerson; - -/** - * Interface Class which will be used to send email notifications to ePerson - * containing information for all list of objects. 
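/*
 * Inferred sketch, not the actual class: SubscriptionItem, used above via the
 * fromItem(...) static factory and the getName()/getUrl()/getItemUrlsByItemName()
 * accessors, is introduced in another file of this PR and is not shown in this
 * excerpt. A shape consistent with those calls would be roughly the following;
 * the real class also provides the static fromItem(DSpaceObject, List) factory
 * (omitted here because it resolves names and URLs from the DSpace objects).
 */
package org.dspace.subscriptions;

import java.util.Map;

public class SubscriptionItem {

    private final String name;
    private final String url;
    // item name -> item URL for each new or updated item found for this subscription
    private final Map<String, String> itemUrlsByItemName;

    public SubscriptionItem(String name, String url, Map<String, String> itemUrlsByItemName) {
        this.name = name;
        this.url = url;
        this.itemUrlsByItemName = itemUrlsByItemName;
    }

    public String getName() {
        return name;
    }

    public String getUrl() {
        return url;
    }

    public Map<String, String> getItemUrlsByItemName() {
        return itemUrlsByItemName;
    }
}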
- * - * @author Alba Aliu - */ -public interface SubscriptionGenerator { - - public void notifyForSubscriptions(Context c, EPerson ePerson, List comm, List coll, List items); - -} \ No newline at end of file From b1365e0ab1bef9229575cba02ff4e9cfb7176563 Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Fri, 10 Nov 2023 16:43:40 +0200 Subject: [PATCH 551/686] [DSC-1321] Edit subscriptions_email_configuration.xml --- .../api/subscriptions_email_configuration.xml | 20 +++++++++---------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/dspace/config/spring/api/subscriptions_email_configuration.xml b/dspace/config/spring/api/subscriptions_email_configuration.xml index c946a29bec7e..d64a78d5e363 100644 --- a/dspace/config/spring/api/subscriptions_email_configuration.xml +++ b/dspace/config/spring/api/subscriptions_email_configuration.xml @@ -14,16 +14,8 @@ http://www.springframework.org/schema/util/spring-util.xsd"> - - - - - - - + + @@ -32,10 +24,16 @@ + + + content + statistics + + - + + + From 7df4a2b1c629726ee8b62b198e3a7950ae898688 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Thu, 5 Oct 2023 16:15:43 -0500 Subject: [PATCH 556/686] Add isNotMemberOf for groups, including unit and integration tests (cherry picked from commit 9d271b24b9721741a53142a690b86287efb738fe) --- .../org/dspace/eperson/GroupServiceImpl.java | 54 ++++- .../java/org/dspace/eperson/dao/GroupDAO.java | 32 +++ .../dspace/eperson/dao/impl/GroupDAOImpl.java | 36 ++++ .../dspace/eperson/service/GroupService.java | 62 ++++-- .../java/org/dspace/eperson/GroupTest.java | 103 ++++++++++ .../rest/repository/GroupRestRepository.java | 29 +++ .../app/rest/GroupRestRepositoryIT.java | 186 ++++++++++++++++++ 7 files changed, 478 insertions(+), 24 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index c2f2ea68bdd7..b8d8c75d0f2e 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -460,17 +460,17 @@ public List findAll(Context context, List metadataSortFiel } @Override - public List search(Context context, String groupIdentifier) throws SQLException { - return search(context, groupIdentifier, -1, -1); + public List search(Context context, String query) throws SQLException { + return search(context, query, -1, -1); } @Override - public List search(Context context, String groupIdentifier, int offset, int limit) throws SQLException { + public List search(Context context, String query, int offset, int limit) throws SQLException { List groups = new ArrayList<>(); - UUID uuid = UUIDUtils.fromString(groupIdentifier); + UUID uuid = UUIDUtils.fromString(query); if (uuid == null) { //Search by group name - groups = groupDAO.findByNameLike(context, groupIdentifier, offset, limit); + groups = groupDAO.findByNameLike(context, query, offset, limit); } else { //Search by group id Group group = find(context, uuid); @@ -483,12 +483,12 @@ public List search(Context context, String groupIdentifier, int offset, i } @Override - public int searchResultCount(Context context, String groupIdentifier) throws SQLException { + public int searchResultCount(Context context, String query) throws SQLException { int result = 0; - UUID uuid = UUIDUtils.fromString(groupIdentifier); + UUID uuid = UUIDUtils.fromString(query); if (uuid == null) { //Search by group name - result = groupDAO.countByNameLike(context, groupIdentifier); + result = 
groupDAO.countByNameLike(context, query); } else { //Search by group id Group group = find(context, uuid); @@ -500,6 +500,44 @@ public int searchResultCount(Context context, String groupIdentifier) throws SQL return result; } + @Override + public List searchNonMembers(Context context, String query, Group excludeParentGroup, + int offset, int limit) throws SQLException { + List groups = new ArrayList<>(); + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by group name + groups = groupDAO.findByNameLikeAndNotMember(context, query, excludeParentGroup, offset, limit); + } else if (!uuid.equals(excludeParentGroup.getID())) { + // Search by group id + Group group = find(context, uuid); + // Verify it is NOT a member of the given excludeParentGroup before adding + if (group != null && !isMember(excludeParentGroup, group)) { + groups.add(group); + } + } + + return groups; + } + + @Override + public int searchNonMembersCount(Context context, String query, Group excludeParentGroup) throws SQLException { + int result = 0; + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by group name + result = groupDAO.countByNameLikeAndNotMember(context, query, excludeParentGroup); + } else if (!uuid.equals(excludeParentGroup.getID())) { + // Search by group id + Group group = find(context, uuid); + // Verify it is NOT a member of the given excludeParentGroup before adding + if (group != null && !isMember(excludeParentGroup, group)) { + result = 1; + } + } + return result; + } + @Override public void delete(Context context, Group group) throws SQLException { if (group.isPermanent()) { diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java index fd56fe9bd1d0..9742e1611e5a 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java @@ -135,6 +135,38 @@ List findAll(Context context, List metadataSortFields, int */ int countByNameLike(Context context, String groupName) throws SQLException; + /** + * Search all groups via their name (fuzzy match), limited to those groups which are NOT a member of the given + * parent group. This may be used to search across groups which are valid to add to the given parent group. + *

    + * NOTE: The parent group itself is also excluded from the search. + * + * @param context The DSpace context + * @param groupName Group name to fuzzy match against. + * @param excludeParent Parent Group to exclude results from. Groups under this parent will never be returned. + * @param offset Offset to use for pagination (-1 to disable) + * @param limit The maximum number of results to return (-1 to disable) + * @return Groups matching the query (which are not members of the given parent) + * @throws SQLException if database error + */ + List findByNameLikeAndNotMember(Context context, String groupName, Group excludeParent, + int offset, int limit) throws SQLException; + + /** + * Count number of groups that match a given name (fuzzy match), limited to those groups which are NOT a member of + * the given parent group. This may be used (with findByNameLikeAndNotMember()) to search across groups which are + * valid to add to the given parent group. + *

    + * NOTE: The parent group itself is also excluded from the count. + * + * @param context The DSpace context + * @param groupName Group name to fuzzy match against. + * @param excludeParent Parent Group to exclude results from. Groups under this parent will never be returned. + * @return Groups matching the query (which are not members of the given parent) + * @throws SQLException if database error + */ + int countByNameLikeAndNotMember(Context context, String groupName, Group excludeParent) throws SQLException; + /** * Find a group by its name and the membership of the given EPerson * diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java index ad9c7b54fdb5..6aea9ecd8d67 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java @@ -164,6 +164,41 @@ public int countByNameLike(final Context context, final String groupName) throws return count(query); } + @Override + public List findByNameLikeAndNotMember(Context context, String groupName, Group excludeParent, + int offset, int limit) throws SQLException { + Query query = createQuery(context, + "FROM Group " + + "WHERE lower(name) LIKE lower(:group_name) " + + "AND id != :parent_id " + + "AND (from Group g where g.id = :parent_id) not in elements (parentGroups)"); + query.setParameter("parent_id", excludeParent.getID()); + query.setParameter("group_name", "%" + StringUtils.trimToEmpty(groupName) + "%"); + + if (0 <= offset) { + query.setFirstResult(offset); + } + if (0 <= limit) { + query.setMaxResults(limit); + } + query.setHint("org.hibernate.cacheable", Boolean.TRUE); + + return list(query); + } + + @Override + public int countByNameLikeAndNotMember(Context context, String groupName, Group excludeParent) throws SQLException { + Query query = createQuery(context, + "SELECT count(*) FROM Group " + + "WHERE lower(name) LIKE lower(:group_name) " + + "AND id != :parent_id " + + "AND (from Group g where g.id = :parent_id) not in elements (parentGroups)"); + query.setParameter("parent_id", excludeParent.getID()); + query.setParameter("group_name", "%" + StringUtils.trimToEmpty(groupName) + "%"); + + return count(query); + } + @Override public void delete(Context context, Group group) throws SQLException { Query query = getHibernateSession(context) @@ -213,6 +248,7 @@ public List findByParent(Context context, Group parent, int pageSize, int return list(query); } + @Override public int countByParent(Context context, Group parent) throws SQLException { Query query = createQuery(context, "SELECT count(g) FROM Group g JOIN g.parentGroups pg " + "WHERE pg.id = :parent_id"); diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java index ef3949149f14..0be2f47a61eb 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java @@ -261,37 +261,67 @@ public List findAll(Context context, List metadataSortFiel public List findAll(Context context, int sortField) throws SQLException; /** - * Find the groups that match the search query across eperson_group_id or name + * Find the Groups that match the query across both Group name and Group ID. This is an unpaginated search, + * which means it will load all matching groups into memory at once. 
This may provide POOR PERFORMANCE when a large + * number of groups are matched. * - * @param context DSpace context - * @param groupIdentifier The group name or group ID - * @return array of Group objects + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @return List of matching Group objects * @throws SQLException if error */ - public List search(Context context, String groupIdentifier) throws SQLException; + List search(Context context, String query) throws SQLException; /** - * Find the groups that match the search query across eperson_group_id or name + * Find the Groups that match the query across both Group name and Group ID. This method supports pagination, + * which provides better performance than the above non-paginated search() method. * - * @param context DSpace context - * @param groupIdentifier The group name or group ID - * @param offset Inclusive offset - * @param limit Maximum number of matches returned - * @return array of Group objects + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @param offset Inclusive offset (the position of the first result to return) + * @param limit Maximum number of matches returned + * @return List of matching Group objects * @throws SQLException if error */ - public List search(Context context, String groupIdentifier, int offset, int limit) throws SQLException; + List search(Context context, String query, int offset, int limit) throws SQLException; /** - * Returns the total number of groups returned by a specific query, without the overhead - * of creating the Group objects to store the results. + * Returns the total number of Groups returned by a specific query. Search is performed based on Group name + * and Group ID. May be used with search() above to support pagination of matching Groups. * * @param context DSpace context - * @param query The search string + * @param query The search string used to search across group name or group ID * @return the number of groups matching the query * @throws SQLException if error */ - public int searchResultCount(Context context, String query) throws SQLException; + int searchResultCount(Context context, String query) throws SQLException; + + /** + * Find the groups that match the search query which are NOT currently members (subgroups) + * of the given parentGroup + * + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @param excludeParentGroup Parent group to exclude results from + * @param offset Inclusive offset (the position of the first result to return) + * @param limit Maximum number of matches returned + * @return List of matching Group objects + * @throws SQLException if error + */ + List searchNonMembers(Context context, String query, Group excludeParentGroup, + int offset, int limit) throws SQLException; + + /** + * Returns the total number of groups that match the search query which are NOT currently members (subgroups) + * of the given parentGroup. Can be used with searchNonMembers() to support pagination. 
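A possible caller-side pattern for these two new methods, illustrative only and not part of this PR; the query string "staff", the page size and the parentGroup variable are example values, and an open Context plus the GroupService bean are assumed:

    int pageSize = 20;
    int total = groupService.searchNonMembersCount(context, "staff", parentGroup);
    for (int offset = 0; offset < total; offset += pageSize) {
        List<Group> candidates = groupService.searchNonMembers(context, "staff", parentGroup, offset, pageSize);
        // render one page of groups that are still valid to add to parentGroup
    }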
+ * + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @param excludeParentGroup Parent group to exclude results from + * @return the number of Groups matching the query + * @throws SQLException if error + */ + int searchNonMembersCount(Context context, String query, Group excludeParentGroup) throws SQLException; /** * Return true if group has no direct or indirect members diff --git a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java index 0eaacb6194ed..fddcabe4b038 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java @@ -680,6 +680,109 @@ public void findAndCountByParent() throws SQLException, AuthorizeException, IOEx } } + @Test + // Tests searchNonMembers() and searchNonMembersCount() + // NOTE: This does not test pagination as that is tested in GroupRestRepositoryIT in server-webapp + public void searchAndCountNonMembers() throws SQLException, AuthorizeException, IOException { + // Create a parent group with 2 child groups + Group parentGroup = createGroup("Some Parent Group"); + Group someStaffGroup = createGroup("Some Other Staff"); + Group someStudentsGroup = createGroup("Some Students"); + groupService.addMember(context, parentGroup, someStaffGroup); + groupService.addMember(context, parentGroup, someStudentsGroup); + groupService.update(context, parentGroup); + + // Create a separate parent which is not a member of the first & add two child groups to it + Group studentsNotInParentGroup = createGroup("Students not in Parent"); + Group otherStudentsNotInParentGroup = createGroup("Other Students"); + Group someOtherStudentsNotInParentGroup = createGroup("Some Other Students"); + groupService.addMember(context, studentsNotInParentGroup, otherStudentsNotInParentGroup); + groupService.addMember(context, studentsNotInParentGroup, someOtherStudentsNotInParentGroup); + groupService.update(context, studentsNotInParentGroup); + + try { + // Assert that all Groups *not* in parent group match an empty search + List notInParent = Arrays.asList(studentsNotInParentGroup, otherStudentsNotInParentGroup, + someOtherStudentsNotInParentGroup); + List nonMembersSearch = groupService.searchNonMembers(context, "", parentGroup, -1, -1); + // NOTE: Because others unit tests create groups, this search will return an undetermined number of results. + // Therefore, we just verify that our expected groups are included and others are NOT included. 
+ assertTrue(nonMembersSearch.containsAll(notInParent)); + // Verify it does NOT contain members of parentGroup + assertFalse(nonMembersSearch.contains(someStaffGroup)); + assertFalse(nonMembersSearch.contains(someStudentsGroup)); + // Verify it also does NOT contain the parentGroup itself + assertFalse(nonMembersSearch.contains(parentGroup)); + // Verify the count for empty search matches the size of the search results + assertEquals(nonMembersSearch.size(), groupService.searchNonMembersCount(context, "", parentGroup)); + + // Assert a search on "Students" matches all those same groups (as they all include that word in their name) + nonMembersSearch = groupService.searchNonMembers(context, "Students", parentGroup, -1, -1); + assertTrue(nonMembersSearch.containsAll(notInParent)); + //Verify an existing member group with "Students" in its name does NOT get returned + assertFalse(nonMembersSearch.contains(someStudentsGroup)); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, "Students", parentGroup)); + + + // Assert a search on "other" matches just two groups + // (this also tests search is case insensitive) + nonMembersSearch = groupService.searchNonMembers(context, "other", parentGroup, -1, -1); + assertTrue(nonMembersSearch.containsAll( + Arrays.asList(otherStudentsNotInParentGroup, someOtherStudentsNotInParentGroup))); + // Verify an existing member group with "Other" in its name does NOT get returned + assertFalse(nonMembersSearch.contains(someStaffGroup)); + assertEquals(nonMembersSearch.size(), groupService.searchNonMembersCount(context, "other", parentGroup)); + + // Assert a search on "Parent" matches just one group + nonMembersSearch = groupService.searchNonMembers(context, "Parent", parentGroup, -1, -1); + assertTrue(nonMembersSearch.contains(studentsNotInParentGroup)); + // Verify Parent Group itself does NOT get returned + assertFalse(nonMembersSearch.contains(parentGroup)); + assertEquals(nonMembersSearch.size(), groupService.searchNonMembersCount(context, "Parent", parentGroup)); + + // Assert a UUID search matching a non-member group will return just that one group + nonMembersSearch = groupService.searchNonMembers(context, + someOtherStudentsNotInParentGroup.getID().toString(), + parentGroup, -1, -1); + assertEquals(1, nonMembersSearch.size()); + assertTrue(nonMembersSearch.contains(someOtherStudentsNotInParentGroup)); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, + someOtherStudentsNotInParentGroup.getID().toString(), + parentGroup)); + + // Assert a UUID search matching an EXISTING member will return NOTHING + // (as this group is excluded from the search) + nonMembersSearch = groupService.searchNonMembers(context, someStudentsGroup.getID().toString(), + parentGroup,-1, -1); + assertEquals(0, nonMembersSearch.size()); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, someStudentsGroup.getID().toString(), + parentGroup)); + + // Assert a UUID search matching Parent Group *itself* will return NOTHING + // (as this group is excluded from the search) + nonMembersSearch = groupService.searchNonMembers(context, parentGroup.getID().toString(), + parentGroup,-1, -1); + assertEquals(0, nonMembersSearch.size()); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, parentGroup.getID().toString(), + parentGroup)); + } finally { + // Clean up our data + context.turnOffAuthorisationSystem(); + groupService.delete(context, parentGroup); + 
groupService.delete(context, someStaffGroup); + groupService.delete(context, someStudentsGroup); + groupService.delete(context, studentsNotInParentGroup); + groupService.delete(context, otherStudentsNotInParentGroup); + groupService.delete(context, someOtherStudentsNotInParentGroup); + context.restoreAuthSystemState(); + } + + } + protected Group createGroup(String name) throws SQLException, AuthorizeException { context.turnOffAuthorisationSystem(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java index 103abdcae645..9eb92d8e6fe5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java @@ -148,6 +148,35 @@ public Page findByMetadata(@Parameter(value = "query", required = tru } } + /** + * Find the Groups matching the query parameter which are NOT a member of the given parent Group. + * The search is delegated to the + * {@link GroupService#search(Context, String, int, int)} method + * + * @param groupUUID the parent group UUID + * @param query is the *required* query string + * @param pageable contains the pagination information + * @return a Page of GroupRest instances matching the user query + */ + @PreAuthorize("hasAuthority('ADMIN') || hasAuthority('MANAGE_ACCESS_GROUP')") + @SearchRestMethod(name = "isNotMemberOf") + public Page findIsNotMemberOf(@Parameter(value = "group", required = true) UUID groupUUID, + @Parameter(value = "query", required = true) String query, + Pageable pageable) { + + try { + Context context = obtainContext(); + Group excludeParentGroup = gs.find(context, groupUUID); + long total = gs.searchNonMembersCount(context, query, excludeParentGroup); + List groups = gs.searchNonMembers(context, query, excludeParentGroup, + Math.toIntExact(pageable.getOffset()), + Math.toIntExact(pageable.getPageSize())); + return converter.toRestPage(groups, pageable, total, utils.obtainProjection()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + @Override public Class getDomainClass() { return GroupRest.class; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java index 797657794a6e..4300c987589a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/GroupRestRepositoryIT.java @@ -3242,6 +3242,192 @@ public void subgroupPaginationTest() throws Exception { .andExpect(jsonPath("$.page.totalElements", is(5))); } + // Test of /groups/search/isNotMemberOf pagination + // NOTE: Additional tests of 'isNotMemberOf' search functionality can be found in GroupTest in 'dspace-api' + @Test + public void searchIsNotMemberOfPaginationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context) + .withName("Test Parent group") + .build(); + // Create two subgroups of main group. These SHOULD NOT be included in pagination + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test group 1") + .build(); + GroupBuilder.createGroup(context) + .withParent(group) + .withName("Test group 2") + .build(); + + // Create five non-member groups. 
These SHOULD be included in pagination + GroupBuilder.createGroup(context) + .withName("Test group 3") + .build(); + GroupBuilder.createGroup(context) + .withName("Test group 4") + .build(); + GroupBuilder.createGroup(context) + .withName("Test group 5") + .build(); + GroupBuilder.createGroup(context) + .withName("Test group 6") + .build(); + GroupBuilder.createGroup(context) + .withName("Test group 7") + .build(); + + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "test group") + .param("page", "0") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.groups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.groups").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(0))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "test group") + .param("page", "1") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.groups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.groups").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(1))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "test group") + .param("page", "2") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.groups", Matchers.everyItem( + hasJsonPath("$.type", is("group"))) + )) + .andExpect(jsonPath("$._embedded.groups").value(Matchers.hasSize(1))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(2))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + } + + @Test + public void searchIsNotMemberOfByUUID() throws Exception { + context.turnOffAuthorisationSystem(); + // Create two groups which have no parent group + Group group1 = GroupBuilder.createGroup(context) + .withName("Test Parent group 1") + .build(); + + Group group2 = GroupBuilder.createGroup(context) + .withName("Test Parent group 2") + .build(); + + // Create a subgroup of parent group 1 + Group group3 = GroupBuilder.createGroup(context) + .withParent(group1) + .withName("Test subgroup") + .build(); + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + // Search for UUID in a group that the subgroup already belongs to. Should return ZERO results. 
+ getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group1.getID().toString()) + .param("query", group3.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))); + + // Search for UUID in a group that the subgroup does NOT belong to. Should return group via exact match + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group2.getID().toString()) + .param("query", group3.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.groups", Matchers.contains( + GroupMatcher.matchGroupEntry(group3.getID(), group3.getName()) + ))) + .andExpect(jsonPath("$.page.totalElements", is(1))); + + // Search for UUID of the group in the "group" param. Should return ZERO results, as "group" param is excluded + getClient(authTokenAdmin).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", group1.getID().toString()) + .param("query", group1.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))); + } + + @Test + public void searchIsNotMemberOfUnauthorized() throws Exception { + // To avoid creating data, just use the Admin & Anon groups for this test + GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); + + getClient().perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("query", anonGroup.getID().toString()) + .param("group", adminGroup.getID().toString())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void searchIsNotMemberOfForbidden() throws Exception { + // To avoid creating data, just use the Admin & Anon groups for this test + GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("query", anonGroup.getID().toString()) + .param("group", adminGroup.getID().toString())) + .andExpect(status().isForbidden()); + } + + @Test + public void searchIsNotMemberOfMissingOrInvalidParameter() throws Exception { + // To avoid creating data, just use the Admin & Anon groups for this test + GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); + + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf")) + .andExpect(status().isBadRequest()); + + getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("query", anonGroup.getID().toString())) + .andExpect(status().isBadRequest()); + + getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("group", adminGroup.getID().toString())) + .andExpect(status().isBadRequest()); + + // Test invalid group UUID + 
getClient(authToken).perform(get("/api/eperson/groups/search/isNotMemberOf") + .param("query", anonGroup.getID().toString()) + .param("group", "not-a-uuid")) + .andExpect(status().isBadRequest()); + } + @Test public void commAdminAndColAdminCannotExploitItemReadGroupTest() throws Exception { From 887c7c486ac48744ccdbf299113b5ab243371143 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 11 Oct 2023 16:29:43 -0500 Subject: [PATCH 557/686] Implement searchNonMembers for EPersonService. Add tests to prove it works (and tests for search()). Requires minor bug fix to AbstractHibernateDSODAO to allow for additional OR/AND clauses to be appended. (cherry picked from commit f186dcf4ca17f56478ce27946acdc2c269d8bd50) --- .../dspace/core/AbstractHibernateDSODAO.java | 6 +- .../dspace/eperson/EPersonServiceImpl.java | 94 ++++++- .../org/dspace/eperson/dao/EPersonDAO.java | 57 +++++ .../eperson/dao/impl/EPersonDAOImpl.java | 86 +++++-- .../eperson/service/EPersonService.java | 32 ++- .../java/org/dspace/eperson/EPersonTest.java | 242 ++++++++++++++---- 6 files changed, 434 insertions(+), 83 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java index e6535f094152..e9c6b95b7f05 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java @@ -83,13 +83,14 @@ protected void addMetadataValueWhereQuery(StringBuilder query, List search(Context context, String query) throws SQLException { @Override public List search(Context context, String query, int offset, int limit) throws SQLException { - try { - List ePerson = new ArrayList<>(); - EPerson person = find(context, UUID.fromString(query)); + List ePersons = new ArrayList<>(); + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by firstname & lastname (NOTE: email will also be included automatically) + MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); + if (StringUtils.isBlank(query)) { + query = null; + } + ePersons = ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField), + Arrays.asList(firstNameField, lastNameField), offset, limit); + } else { + // Search by UUID + EPerson person = find(context, uuid); if (person != null) { - ePerson.add(person); + ePersons.add(person); } - return ePerson; - } catch (IllegalArgumentException e) { + } + return ePersons; + } + + @Override + public int searchResultCount(Context context, String query) throws SQLException { + int result = 0; + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Count results found by firstname & lastname (email is also included automatically) MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); if (StringUtils.isBlank(query)) { query = null; } - return ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField), - Arrays.asList(firstNameField, lastNameField), offset, limit); + result = ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField)); + } else { + // Search by UUID + EPerson person = find(context, uuid); + if (person != null) { + result = 1; + } 
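        /*
         * Illustrative behaviour sketch, not part of this patch: because the UUID
         * branch above filters on groupService.isDirectMember(excludeGroup, person),
         * a UUID-valued query acts as an exact lookup restricted to non-members.
         * With hypothetical fixtures nonMember (not in group) and existingMember
         * (already a direct member of group), the expected results are:
         *
         *   ePersonService.searchNonMembers(context, nonMember.getID().toString(), group, -1, -1)
         *       // -> singleton list containing nonMember
         *   ePersonService.searchNonMembers(context, existingMember.getID().toString(), group, -1, -1)
         *       // -> empty list; members of the excluded group are never returned
         */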
} + return result; } @Override - public int searchResultCount(Context context, String query) throws SQLException { - MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); - MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); - if (StringUtils.isBlank(query)) { - query = null; + public List searchNonMembers(Context context, String query, Group excludeGroup, int offset, int limit) + throws SQLException { + List ePersons = new ArrayList<>(); + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by firstname & lastname (NOTE: email will also be included automatically) + MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); + if (StringUtils.isBlank(query)) { + query = null; + } + ePersons = ePersonDAO.searchNotMember(context, query, Arrays.asList(firstNameField, lastNameField), + excludeGroup, Arrays.asList(firstNameField, lastNameField), + offset, limit); + } else { + // Search by UUID + EPerson person = find(context, uuid); + // Verify EPerson is NOT a member of the given excludeGroup before adding + if (person != null && !groupService.isDirectMember(excludeGroup, person)) { + ePersons.add(person); + } + } + + return ePersons; + } + + @Override + public int searchNonMembersCount(Context context, String query, Group excludeGroup) throws SQLException { + int result = 0; + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Count results found by firstname & lastname (email is also included automatically) + MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); + if (StringUtils.isBlank(query)) { + query = null; + } + result = ePersonDAO.searchNotMemberCount(context, query, Arrays.asList(firstNameField, lastNameField), + excludeGroup); + } else { + // Search by UUID + EPerson person = find(context, uuid); + // Verify EPerson is NOT a member of the given excludeGroup before counting + if (person != null && !groupService.isDirectMember(excludeGroup, person)) { + result = 1; + } } - return ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField)); + return result; } @Override diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java index 9e78e758f92b..f7543570dffb 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java @@ -33,11 +33,68 @@ public interface EPersonDAO extends DSpaceObjectDAO, DSpaceObjectLegacy public EPerson findByNetid(Context context, String netid) throws SQLException; + /** + * Search all EPersons by the given MetadataField objects, sorting by the given sort fields. + *

    + * NOTE: As long as a query is specified, the EPerson's email address is included in the search alongside any given + * metadata fields. + * + * @param context DSpace context + * @param query the text to search EPersons for + * @param queryFields the metadata fields to search within (email is also included automatically) + * @param sortFields the metadata field(s) to sort the results by + * @param offset the position of the first result to return + * @param limit how many results return + * @return List of matching EPerson objects + * @throws SQLException if an error occurs + */ public List search(Context context, String query, List queryFields, List sortFields, int offset, int limit) throws SQLException; + /** + * Count number of EPersons who match a search on the given metadata fields. This returns the count of total + * results for the same query using the 'search()', and therefore can be used to provide pagination. + * + * @param context DSpace context + * @param query the text to search EPersons for + * @param queryFields the metadata fields to search within (email is also included automatically) + * @return total number of EPersons who match the query + * @throws SQLException if an error occurs + */ public int searchResultCount(Context context, String query, List queryFields) throws SQLException; + /** + * Search all EPersons via their firstname, lastname, email (fuzzy match), limited to those EPersons which are NOT + * a member of the given group. This may be used to search across EPersons which are valid to add as members to the + * given group. + * + * @param context The DSpace context + * @param query the text to search EPersons for + * @param queryFields the metadata fields to search within (email is also included automatically) + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. + * @param offset the position of the first result to return + * @param limit how many results return + * @return EPersons matching the query (which are not members of the given group) + * @throws SQLException if database error + */ + List searchNotMember(Context context, String query, List queryFields, Group excludeGroup, + List sortFields, int offset, int limit) throws SQLException; + + /** + * Count number of EPersons that match a given search (fuzzy match) across firstname, lastname and email. This + * search is limited to those EPersons which are NOT a member of the given group. This may be used + * (with searchNotMember()) to perform a paginated search across EPersons which are valid to add to the given group. + * + * @param context The DSpace context + * @param query querystring to fuzzy match against. + * @param queryFields the metadata fields to search within (email is also included automatically) + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. + * @return Groups matching the query (which are not members of the given parent) + * @throws SQLException if database error + */ + int searchNotMemberCount(Context context, String query, List queryFields, Group excludeGroup) + throws SQLException; + /** * Find all EPersons who are a member of one or more of the listed groups in a paginated fashion. This returns * EPersons ordered by UUID. 
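The stated use case for searchNotMember()/searchNonMembers() is finding EPersons that can still be added to a given group. A caller-side sketch of that flow, illustrative only and not part of this PR; the query string, page size and variable names are example values, and an open Context plus the usual EPersonService and GroupService beans are assumed:

    List<EPerson> candidates = ePersonService.searchNonMembers(context, "smith", group, 0, 20);
    for (EPerson candidate : candidates) {
        // safe to add: searchNonMembers() never returns current members of 'group'
        groupService.addMember(context, group, candidate);
    }
    groupService.update(context, group);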
diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java index bd68a7f399d9..4d64dd967ff8 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java @@ -70,17 +70,9 @@ public List search(Context context, String query, List q String queryString = "SELECT " + EPerson.class.getSimpleName() .toLowerCase() + " FROM EPerson as " + EPerson.class .getSimpleName().toLowerCase() + " "; - if (query != null) { - query = "%" + query.toLowerCase() + "%"; - } - Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, sortFields, null); - if (0 <= offset) { - hibernateQuery.setFirstResult(offset); - } - if (0 <= limit) { - hibernateQuery.setMaxResults(limit); - } + Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, null, + sortFields, null, limit, offset); return list(hibernateQuery); } @@ -92,6 +84,28 @@ public int searchResultCount(Context context, String query, List return count(hibernateQuery); } + @Override + public List searchNotMember(Context context, String query, List queryFields, + Group excludeGroup, List sortFields, + int offset, int limit) throws SQLException { + String queryString = "SELECT " + EPerson.class.getSimpleName() + .toLowerCase() + " FROM EPerson as " + EPerson.class + .getSimpleName().toLowerCase() + " "; + + Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, excludeGroup, + sortFields, null, limit, offset); + return list(hibernateQuery); + } + + public int searchNotMemberCount(Context context, String query, List queryFields, + Group excludeGroup) throws SQLException { + String queryString = "SELECT count(*) FROM EPerson as " + EPerson.class.getSimpleName().toLowerCase(); + + Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, excludeGroup, + Collections.EMPTY_LIST, null, -1, -1); + return count(hibernateQuery); + } + @Override public List findAll(Context context, MetadataField metadataSortField, String sortField, int pageSize, int offset) throws SQLException { @@ -105,8 +119,8 @@ public List findAll(Context context, MetadataField metadataSortField, S sortFields = Collections.singletonList(metadataSortField); } - Query query = getSearchQuery(context, queryString, null, ListUtils.EMPTY_LIST, sortFields, sortField, pageSize, - offset); + Query query = getSearchQuery(context, queryString, null, ListUtils.EMPTY_LIST, null, + sortFields, sortField, pageSize, offset); return list(query); } @@ -178,43 +192,81 @@ public List findNotActiveSince(Context context, Date date) throws SQLEx protected Query getSearchQuery(Context context, String queryString, String queryParam, List queryFields, List sortFields, String sortField) throws SQLException { - return getSearchQuery(context, queryString, queryParam, queryFields, sortFields, sortField, -1, -1); + return getSearchQuery(context, queryString, queryParam, queryFields, null, sortFields, sortField, -1, -1); } + /** + * Build a search query across EPersons based on the given metadata fields and sorted based on the given metadata + * field(s) or database column. + *

    + * NOTE: the EPerson's email address is included in the search alongside any given metadata fields. + * + * @param context DSpace Context + * @param queryString String which defines the beginning "SELECT" for the SQL query + * @param queryParam Actual text being searched for + * @param queryFields List of metadata fields to search within + * @param excludeGroup Optional Group which should be excluded from search. Any EPersons who are members + * of this group will not be included in the results. + * @param sortFields Optional List of metadata fields to sort by (should not be specified if sortField is used) + * @param sortField Optional database column to sort on (should not be specified if sortFields is used) + * @param pageSize how many results return + * @param offset the position of the first result to return + * @return built Query object + * @throws SQLException if error occurs + */ protected Query getSearchQuery(Context context, String queryString, String queryParam, - List queryFields, List sortFields, String sortField, - int pageSize, int offset) throws SQLException { - + List queryFields, Group excludeGroup, + List sortFields, String sortField, + int pageSize, int offset) throws SQLException { + // Initialize SQL statement using the passed in "queryString" StringBuilder queryBuilder = new StringBuilder(); queryBuilder.append(queryString); + Set metadataFieldsToJoin = new LinkedHashSet<>(); metadataFieldsToJoin.addAll(queryFields); metadataFieldsToJoin.addAll(sortFields); + // Append necessary join information for MetadataFields we will search within if (!CollectionUtils.isEmpty(metadataFieldsToJoin)) { addMetadataLeftJoin(queryBuilder, EPerson.class.getSimpleName().toLowerCase(), metadataFieldsToJoin); } - if (queryParam != null) { + // Always append a search on EPerson "email" based on query + if (StringUtils.isNotBlank(queryParam)) { addMetadataValueWhereQuery(queryBuilder, queryFields, "like", EPerson.class.getSimpleName().toLowerCase() + ".email like :queryParam"); } + // If excludeGroup is specified, exclude members of that group from results + // This uses a subquery to find the excluded group & verify that it is not in the EPerson list of "groups" + if (excludeGroup != null) { + queryBuilder.append(" AND (FROM Group g where g.id = :group_id) NOT IN elements (") + .append(EPerson.class.getSimpleName().toLowerCase()).append(".groups)"); + } + // Add sort/order by info to query, if specified if (!CollectionUtils.isEmpty(sortFields) || StringUtils.isNotBlank(sortField)) { addMetadataSortQuery(queryBuilder, sortFields, Collections.singletonList(sortField)); } + // Create the final SQL SELECT statement (based on included params above) Query query = createQuery(context, queryBuilder.toString()); + // Set pagesize & offset for pagination if (pageSize > 0) { query.setMaxResults(pageSize); } if (offset > 0) { query.setFirstResult(offset); } + // Set all parameters to the SQL SELECT statement (based on included params above) if (StringUtils.isNotBlank(queryParam)) { query.setParameter("queryParam", "%" + queryParam.toLowerCase() + "%"); } for (MetadataField metadataField : metadataFieldsToJoin) { query.setParameter(metadataField.toString(), metadataField.getID()); } + if (excludeGroup != null) { + query.setParameter("group_id", excludeGroup.getID()); + } + + query.setHint("org.hibernate.cacheable", Boolean.TRUE); return query; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java 
b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java index 5b10ea539b33..2afec161a672 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java @@ -98,9 +98,9 @@ public List search(Context context, String query) * * @param context The relevant DSpace Context. * @param query The search string - * @param offset Inclusive offset + * @param offset Inclusive offset (the position of the first result to return) * @param limit Maximum number of matches returned - * @return array of EPerson objects + * @return List of matching EPerson objects * @throws SQLException An exception that provides information on a database access error or other errors. */ public List search(Context context, String query, int offset, int limit) @@ -118,6 +118,34 @@ public List search(Context context, String query, int offset, int limit public int searchResultCount(Context context, String query) throws SQLException; + /** + * Find the EPersons that match the search query which are NOT currently members of the given Group. The search + * query is run against firstname, lastname or email. + * + * @param context DSpace context + * @param query The search string + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. + * @param offset Inclusive offset (the position of the first result to return) + * @param limit Maximum number of matches returned + * @return List of matching EPerson objects + * @throws SQLException if error + */ + List searchNonMembers(Context context, String query, Group excludeGroup, + int offset, int limit) throws SQLException; + + /** + * Returns the total number of EPersons that match the search query which are NOT currently members of the given + * Group. The search query is run against firstname, lastname or email. Can be used with searchNonMembers() to + * support pagination + * + * @param context DSpace context + * @param query The search string + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. + * @return List of matching EPerson objects + * @throws SQLException if error + */ + int searchNonMembersCount(Context context, String query, Group excludeGroup) throws SQLException; + /** * Find all the {@code EPerson}s in a specific order by field. * The sortable fields are: diff --git a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java index 6c162c30d1ad..3780afcf6393 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java @@ -8,6 +8,7 @@ package org.dspace.eperson; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @@ -15,6 +16,8 @@ import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Set; @@ -277,63 +280,184 @@ public void testFindByNetid() */ /** - * Test of search method, of class EPerson. 
+ * Test of search() and searchResultCount() methods of EPersonService + * NOTE: Pagination is not verified here because it is tested in EPersonRestRepositoryIT */ -/* @Test - public void testSearch_Context_String() - throws Exception - { - System.out.println("search"); - Context context = null; - String query = ""; - EPerson[] expResult = null; - EPerson[] result = EPerson.search(context, query); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); + public void testSearchAndCountByNameEmail() throws SQLException, AuthorizeException, IOException { + List allEPeopleAdded = new ArrayList<>(); + Group testGroup = createGroup("TestingGroup"); + try { + // Create 4 EPersons. Add a few to a test group to verify group membership doesn't matter + EPerson eperson1 = createEPersonAndAddToGroup("eperson1@example.com", "Jane", "Doe", testGroup); + EPerson eperson2 = createEPerson("eperson2@example.com", "John", "Doe"); + EPerson eperson3 = createEPersonAndAddToGroup("eperson3@example.com", "John", "Smith", testGroup); + EPerson eperson4 = createEPerson("eperson4@example.com", "Doe", "Smith"); + allEPeopleAdded.addAll(Arrays.asList(eperson1, eperson2, eperson3, eperson4)); + + List allJohns = Arrays.asList(eperson2, eperson3); + List searchJohnResults = ePersonService.search(context, "John", -1, -1); + assertTrue(searchJohnResults.containsAll(allJohns)); + assertEquals(searchJohnResults.size(), ePersonService.searchResultCount(context, "John")); + + List allDoes = Arrays.asList(eperson1, eperson2, eperson4); + List searchDoeResults = ePersonService.search(context, "Doe", -1, -1); + assertTrue(searchDoeResults.containsAll(allDoes)); + assertEquals(searchDoeResults.size(), ePersonService.searchResultCount(context, "Doe")); + + List allSmiths = Arrays.asList(eperson3, eperson4); + List searchSmithResults = ePersonService.search(context, "Smith", -1, -1); + assertTrue(searchSmithResults.containsAll(allSmiths)); + assertEquals(searchSmithResults.size(), ePersonService.searchResultCount(context, "Smith")); + + // Assert search on example.com returns everyone + List searchEmailResults = ePersonService.search(context, "example.com", -1, -1); + assertTrue(searchEmailResults.containsAll(allEPeopleAdded)); + assertEquals(searchEmailResults.size(), ePersonService.searchResultCount(context, "example.com")); + + // Assert exact email search returns just one + List exactEmailResults = ePersonService.search(context, "eperson1@example.com", -1, -1); + assertTrue(exactEmailResults.contains(eperson1)); + assertEquals(exactEmailResults.size(), ePersonService.searchResultCount(context, "eperson1@example.com")); + + // Assert UUID search returns exact match + List uuidResults = ePersonService.search(context, eperson4.getID().toString(), -1, -1); + assertTrue(uuidResults.contains(eperson4)); + assertEquals(1, uuidResults.size()); + assertEquals(uuidResults.size(), ePersonService.searchResultCount(context, eperson4.getID().toString())); + } finally { + // Remove all Groups & EPersons we added for this test + context.turnOffAuthorisationSystem(); + groupService.delete(context, testGroup); + for (EPerson ePerson : allEPeopleAdded) { + ePersonService.delete(context, ePerson); + } + context.restoreAuthSystemState(); + } } -*/ /** - * Test of search method, of class EPerson. 
+ * Test of searchNonMembers() and searchNonMembersCount() methods of EPersonService + * NOTE: Pagination is not verified here because it is tested in EPersonRestRepositoryIT */ -/* @Test - public void testSearch_4args() - throws Exception - { - System.out.println("search"); - Context context = null; - String query = ""; - int offset = 0; - int limit = 0; - EPerson[] expResult = null; - EPerson[] result = EPerson.search(context, query, offset, limit); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ + public void testSearchAndCountByNameEmailNonMembers() throws SQLException, AuthorizeException, IOException { + List allEPeopleAdded = new ArrayList<>(); + Group testGroup1 = createGroup("TestingGroup1"); + Group testGroup2 = createGroup("TestingGroup2"); + Group testGroup3 = createGroup("TestingGroup3"); + try { + // Create two EPersons in Group 1 + EPerson eperson1 = createEPersonAndAddToGroup("eperson1@example.com", "Jane", "Doe", testGroup1); + EPerson eperson2 = createEPersonAndAddToGroup("eperson2@example.com", "John", "Smith", testGroup1); - /** - * Test of searchResultCount method, of class EPerson. - */ -/* - @Test - public void testSearchResultCount() - throws Exception - { - System.out.println("searchResultCount"); - Context context = null; - String query = ""; - int expResult = 0; - int result = EPerson.searchResultCount(context, query); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); + // Create one more EPerson, and add it and a previous EPerson to Group 2 + EPerson eperson3 = createEPersonAndAddToGroup("eperson3@example.com", "John", "Doe", testGroup2); + context.turnOffAuthorisationSystem(); + groupService.addMember(context, testGroup2, eperson2); + groupService.update(context, testGroup2); + ePersonService.update(context, eperson2); + context.restoreAuthSystemState(); + + // Create 2 more EPersons with no group memberships + EPerson eperson4 = createEPerson("eperson4@example.com", "John", "Anthony"); + EPerson eperson5 = createEPerson("eperson5@example.org", "Smith", "Doe"); + allEPeopleAdded.addAll(Arrays.asList(eperson1, eperson2, eperson3, eperson4, eperson5)); + + // FIRST, test search by last name + // Verify all Does match a nonMember search of Group3 (which is an empty group) + List allDoes = Arrays.asList(eperson1, eperson3, eperson5); + List searchDoeResults = ePersonService.searchNonMembers(context, "Doe", testGroup3, -1, -1); + assertTrue(searchDoeResults.containsAll(allDoes)); + assertEquals(searchDoeResults.size(), ePersonService.searchNonMembersCount(context, "Doe", testGroup3)); + + // Verify searching "Doe" with Group 2 *excludes* the one which is already a member + List allNonMemberDoes = Arrays.asList(eperson1, eperson5); + List searchNonMemberDoeResults = ePersonService.searchNonMembers(context, "Doe", testGroup2, + -1, -1); + assertTrue(searchNonMemberDoeResults.containsAll(allNonMemberDoes)); + assertFalse(searchNonMemberDoeResults.contains(eperson3)); + assertEquals(searchNonMemberDoeResults.size(), ePersonService.searchNonMembersCount(context, "Doe", + testGroup2)); + + // Verify searching "Doe" with Group 1 *excludes* the one which is already a member + allNonMemberDoes = Arrays.asList(eperson3, eperson5); + searchNonMemberDoeResults = ePersonService.searchNonMembers(context, "Doe", testGroup1, -1, -1); + 
assertTrue(searchNonMemberDoeResults.containsAll(allNonMemberDoes)); + assertFalse(searchNonMemberDoeResults.contains(eperson1)); + assertEquals(searchNonMemberDoeResults.size(), ePersonService.searchNonMembersCount(context, "Doe", + testGroup1)); + + // SECOND, test search by first name + // Verify all Johns match a nonMember search of Group3 (which is an empty group) + List allJohns = Arrays.asList(eperson2, eperson3, eperson4); + List searchJohnResults = ePersonService.searchNonMembers(context, "John", + testGroup3, -1, -1); + assertTrue(searchJohnResults.containsAll(allJohns)); + assertEquals(searchJohnResults.size(), ePersonService.searchNonMembersCount(context, "John", + testGroup3)); + + // Verify searching "John" with Group 2 *excludes* the two who are already a member + List allNonMemberJohns = Arrays.asList(eperson4); + List searchNonMemberJohnResults = ePersonService.searchNonMembers(context, "John", + testGroup2, -1, -1); + assertTrue(searchNonMemberJohnResults.containsAll(allNonMemberJohns)); + assertFalse(searchNonMemberJohnResults.contains(eperson2)); + assertFalse(searchNonMemberJohnResults.contains(eperson3)); + assertEquals(searchNonMemberJohnResults.size(), ePersonService.searchNonMembersCount(context, "John", + testGroup2)); + + // FINALLY, test search by email + // Assert search on example.com excluding Group 1 returns just those not in that group + List exampleNonMembers = Arrays.asList(eperson3, eperson4); + List searchEmailResults = ePersonService.searchNonMembers(context, "example.com", + testGroup1, -1, -1); + assertTrue(searchEmailResults.containsAll(exampleNonMembers)); + assertFalse(searchEmailResults.contains(eperson1)); + assertFalse(searchEmailResults.contains(eperson2)); + assertEquals(searchEmailResults.size(), ePersonService.searchNonMembersCount(context, "example.com", + testGroup1)); + + // Assert exact email search returns just one (if not in group) + List exactEmailResults = ePersonService.searchNonMembers(context, "eperson1@example.com", + testGroup2, -1, -1); + assertTrue(exactEmailResults.contains(eperson1)); + assertEquals(exactEmailResults.size(), ePersonService.searchNonMembersCount(context, "eperson1@example.com", + testGroup2)); + // But, change the group to one they are a member of, and they won't be included + exactEmailResults = ePersonService.searchNonMembers(context, "eperson1@example.com", + testGroup1, -1, -1); + assertFalse(exactEmailResults.contains(eperson1)); + assertEquals(exactEmailResults.size(), ePersonService.searchNonMembersCount(context, "eperson1@example.com", + testGroup1)); + + // Assert UUID search returns exact match (if not in group) + List uuidResults = ePersonService.searchNonMembers(context, eperson3.getID().toString(), + testGroup1, -1, -1); + assertTrue(uuidResults.contains(eperson3)); + assertEquals(1, uuidResults.size()); + assertEquals(uuidResults.size(), ePersonService.searchNonMembersCount(context, eperson3.getID().toString(), + testGroup1)); + // But, change the group to one they are a member of, and you'll get no results + uuidResults = ePersonService.searchNonMembers(context, eperson3.getID().toString(), + testGroup2, -1, -1); + assertFalse(uuidResults.contains(eperson3)); + assertEquals(0, uuidResults.size()); + assertEquals(uuidResults.size(), ePersonService.searchNonMembersCount(context, eperson3.getID().toString(), + testGroup2)); + + } finally { + // Remove all Groups & EPersons we added for this test + context.turnOffAuthorisationSystem(); + groupService.delete(context, testGroup1); + 
groupService.delete(context, testGroup2); + groupService.delete(context, testGroup3); + for (EPerson ePerson : allEPeopleAdded) { + ePersonService.delete(context, ePerson); + } + context.restoreAuthSystemState(); + } } -*/ /** * Test of findAll method, of class EPerson. @@ -1149,6 +1273,17 @@ protected EPerson createEPersonAndAddToGroup(String email, Group group) throws S return ePerson; } + protected EPerson createEPersonAndAddToGroup(String email, String firstname, String lastname, Group group) + throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = createEPerson(email, firstname, lastname); + groupService.addMember(context, group, ePerson); + groupService.update(context, group); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } + protected EPerson createEPerson(String email) throws SQLException, AuthorizeException { context.turnOffAuthorisationSystem(); EPerson ePerson = ePersonService.create(context); @@ -1157,4 +1292,15 @@ protected EPerson createEPerson(String email) throws SQLException, AuthorizeExce context.restoreAuthSystemState(); return ePerson; } + protected EPerson createEPerson(String email, String firstname, String lastname) + throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = ePersonService.create(context); + ePerson.setEmail(email); + ePerson.setFirstName(context, firstname); + ePerson.setLastName(context, lastname); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } } From b65ddc36380e3480d38257597c174c866108a848 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Thu, 12 Oct 2023 12:09:41 -0500 Subject: [PATCH 558/686] Add /epersons/search/isNotMemberOf endpoint to REST API along with integration tests (cherry picked from commit 5208a355d69c86dc7cb3ea372656c6959664fd9a) --- .../repository/EPersonRestRepository.java | 34 +++ .../rest/repository/GroupRestRepository.java | 2 +- .../app/rest/EPersonRestRepositoryIT.java | 240 ++++++++++++++++++ 3 files changed, 275 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/EPersonRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/EPersonRestRepository.java index 062f7b7a9482..bd42b7420649 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/EPersonRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/EPersonRestRepository.java @@ -38,9 +38,11 @@ import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.EmptyWorkflowGroupException; +import org.dspace.eperson.Group; import org.dspace.eperson.RegistrationData; import org.dspace.eperson.service.AccountService; import org.dspace.eperson.service.EPersonService; +import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.RegistrationDataService; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -79,6 +81,9 @@ public class EPersonRestRepository extends DSpaceObjectRestRepository findByMetadata(@Parameter(value = "query", required = t } } + /** + * Find the EPersons matching the query parameter which are NOT a member of the given Group. 
+ * The search is delegated to the + * {@link EPersonService#searchNonMembers(Context, String, Group, int, int)} method + * + * @param groupUUID the *required* group UUID to exclude results from + * @param query is the *required* query string + * @param pageable contains the pagination information + * @return a Page of EPersonRest instances matching the user query + */ + @PreAuthorize("hasAuthority('ADMIN') || hasAuthority('MANAGE_ACCESS_GROUP')") + @SearchRestMethod(name = "isNotMemberOf") + public Page findIsNotMemberOf(@Parameter(value = "group", required = true) UUID groupUUID, + @Parameter(value = "query", required = true) String query, + Pageable pageable) { + + try { + Context context = obtainContext(); + Group excludeGroup = groupService.find(context, groupUUID); + long total = es.searchNonMembersCount(context, query, excludeGroup); + List epersons = es.searchNonMembers(context, query, excludeGroup, + Math.toIntExact(pageable.getOffset()), + Math.toIntExact(pageable.getPageSize())); + return converter.toRestPage(epersons, pageable, total, utils.obtainProjection()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + @Override @PreAuthorize("hasPermission(#uuid, 'EPERSON', #patch)") protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java index 9eb92d8e6fe5..a3b525387c62 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/GroupRestRepository.java @@ -151,7 +151,7 @@ public Page findByMetadata(@Parameter(value = "query", required = tru /** * Find the Groups matching the query parameter which are NOT a member of the given parent Group. 
* The search is delegated to the - * {@link GroupService#search(Context, String, int, int)} method + * {@link GroupService#searchNonMembers(Context, String, Group, int, int)} method * * @param groupUUID the parent group UUID * @param query is the *required* query string diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java index 1f09779ab0e9..6c20dcff4a80 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonRestRepositoryIT.java @@ -79,6 +79,7 @@ import org.dspace.eperson.PasswordHash; import org.dspace.eperson.service.AccountService; import org.dspace.eperson.service.EPersonService; +import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.RegistrationDataService; import org.dspace.services.ConfigurationService; import org.hamcrest.Matchers; @@ -96,6 +97,9 @@ public class EPersonRestRepositoryIT extends AbstractControllerIntegrationTest { @Autowired private EPersonService ePersonService; + @Autowired + private GroupService groupService; + @Autowired private ConfigurationService configurationService; @@ -775,6 +779,242 @@ public void findByMetadataMissingParameter() throws Exception { .andExpect(status().isBadRequest()); } + // Test of /epersons/search/isNotMemberOf pagination + // NOTE: Additional tests of 'isNotMemberOf' search functionality can be found in EPersonTest in 'dspace-api' + @Test + public void searchIsNotMemberOfPaginationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context) + .withName("Test Parent group") + .build(); + // Create two EPerson in main group. These SHOULD NOT be included in pagination + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test", "Person") + .withEmail("test@example.com") + .withGroupMembership(group) + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test2", "Person") + .withEmail("test2@example.com") + .withGroupMembership(group) + .build(); + + // Create five EPersons who are NOT members of that group. 
These SHOULD be included in pagination + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test3", "Person") + .withEmail("test3@example.com") + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test4", "Person") + .withEmail("test4@example.com") + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test5", "Person") + .withEmail("test5@example.com") + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test6", "Person") + .withEmail("test6@example.com") + .build(); + EPersonBuilder.createEPerson(context) + .withNameInMetadata("Test7", "Person") + .withEmail("test7@example.com") + .build(); + + context.restoreAuthSystemState(); + + String authTokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(authTokenAdmin).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "person") + .param("page", "0") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(0))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "person") + .param("page", "1") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(2))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(1))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + + getClient(authTokenAdmin).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("group", group.getID().toString()) + .param("query", "person") + .param("page", "2") + .param("size", "2")) + .andExpect(status().isOk()).andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.everyItem( + hasJsonPath("$.type", is("eperson"))) + )) + .andExpect(jsonPath("$._embedded.epersons").value(Matchers.hasSize(1))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.number", is(2))) + .andExpect(jsonPath("$.page.totalPages", is(3))) + .andExpect(jsonPath("$.page.totalElements", is(5))); + } + + @Test + public void searchIsNotMemberOfByEmail() throws Exception { + context.turnOffAuthorisationSystem(); + Group group = GroupBuilder.createGroup(context) + .withName("Test group") + .build(); + Group group2 = GroupBuilder.createGroup(context) + .withName("Test another group") + .build(); + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withNameInMetadata("John", "Doe") + .withEmail("Johndoe@example.com") + .withGroupMembership(group) + .build(); + + EPerson ePerson2 = EPersonBuilder.createEPerson(context) + .withNameInMetadata("Jane", "Smith") + .withEmail("janesmith@example.com") + .build(); + + EPerson ePerson3 = EPersonBuilder.createEPerson(context) + .withNameInMetadata("Tom", "Doe") + .withEmail("tomdoe@example.com") + .build(); + + EPerson 
ePerson4 = EPersonBuilder.createEPerson(context) + .withNameInMetadata("Harry", "Prefix-Doe") + .withEmail("harrydoeprefix@example.com") + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(admin.getEmail(), password); + // Search for exact email in a group the person already belongs to. Should return zero results. + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", ePerson.getEmail()) + .param("group", group.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))); + + // Search for exact email in a group the person does NOT belong to. Should return the person + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", ePerson.getEmail()) + .param("group", group2.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.contains( + EPersonMatcher.matchEPersonEntry(ePerson) + ))) + .andExpect(jsonPath("$.page.totalElements", is(1))); + + // Search partial email should return all the people created above. + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", "example.com") + .param("group", group2.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.containsInAnyOrder( + EPersonMatcher.matchEPersonEntry(ePerson), + EPersonMatcher.matchEPersonEntry(ePerson2), + EPersonMatcher.matchEPersonEntry(ePerson3), + EPersonMatcher.matchEPersonEntry(ePerson4) + ))); + } + + @Test + public void searchIsNotMemberOfByUUID() throws Exception { + context.turnOffAuthorisationSystem(); + Group group = GroupBuilder.createGroup(context) + .withName("Test group") + .build(); + Group group2 = GroupBuilder.createGroup(context) + .withName("Test another group") + .build(); + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withNameInMetadata("John", "Doe") + .withEmail("Johndoe@example.com") + .withGroupMembership(group) + .build(); + context.restoreAuthSystemState(); + + String authToken = getAuthToken(admin.getEmail(), password); + // Search for UUID in a group the person already belongs to. Should return zero results. + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", ePerson.getID().toString()) + .param("group", group.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))); + + // Search for exact email in a group the person does NOT belong to. 
Should return the person + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", ePerson.getID().toString()) + .param("group", group2.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.epersons", Matchers.contains( + EPersonMatcher.matchEPersonEntry(ePerson) + ))) + .andExpect(jsonPath("$.page.totalElements", is(1))); + } + + @Test + public void searchIsNotMemberOfUnauthorized() throws Exception { + Group adminGroup = groupService.findByName(context, Group.ADMIN); + getClient().perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", eperson.getID().toString()) + .param("group", adminGroup.getID().toString())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void searchIsNotMemberOfForbidden() throws Exception { + Group adminGroup = groupService.findByName(context, Group.ADMIN); + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", eperson.getID().toString()) + .param("group", adminGroup.getID().toString())) + .andExpect(status().isForbidden()); + } + + @Test + public void searchIsNotMemberOfMissingOrInvalidParameter() throws Exception { + Group adminGroup = groupService.findByName(context, Group.ADMIN); + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf")) + .andExpect(status().isBadRequest()); + + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", eperson.getID().toString())) + .andExpect(status().isBadRequest()); + + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("group", adminGroup.getID().toString())) + .andExpect(status().isBadRequest()); + + // Test invalid group UUID + getClient(authToken).perform(get("/api/eperson/epersons/search/isNotMemberOf") + .param("query", eperson.getID().toString()) + .param("group", "not-a-uuid")) + .andExpect(status().isBadRequest()); + } + @Test public void deleteOne() throws Exception { context.turnOffAuthorisationSystem(); From c2fd6179218e3604001fbc659e99e0c58ce97d64 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 17 Oct 2023 16:27:51 -0500 Subject: [PATCH 559/686] Bug fix to EPersonDAOImpl. 
Correctly determine if excluded group needs to be preceded by AND or WHERE (cherry picked from commit e5e0eaa9999a96f499c131e02877d4280f7b5263) --- .../java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java index 4d64dd967ff8..87d6c5869b09 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java @@ -238,7 +238,14 @@ protected Query getSearchQuery(Context context, String queryString, String query // If excludeGroup is specified, exclude members of that group from results // This uses a subquery to find the excluded group & verify that it is not in the EPerson list of "groups" if (excludeGroup != null) { - queryBuilder.append(" AND (FROM Group g where g.id = :group_id) NOT IN elements (") + // If query params exist, then we already have a WHERE clause (see above) and just need to append an AND + if (StringUtils.isNotBlank(queryParam)) { + queryBuilder.append(" AND "); + } else { + // no WHERE clause yet, so this is the start of the WHERE + queryBuilder.append(" WHERE "); + } + queryBuilder.append("(FROM Group g where g.id = :group_id) NOT IN elements (") .append(EPerson.class.getSimpleName().toLowerCase()).append(".groups)"); } // Add sort/order by info to query, if specified From 7d7031a1b65d11963bda0fe86e2c070a85b14b44 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 13 Nov 2023 11:49:54 +0100 Subject: [PATCH 560/686] [DSC-1229] Fixes flyway script naming issue --- ...=> V7.5_2023.09.22__registration_data.sql} | 0 .../V7.5_2023.09.22__registration_data.sql | 64 +++++++++++++++++++ .../V7.6_2023.09.22__registration_data.sql | 52 --------------- 3 files changed, 64 insertions(+), 52 deletions(-) rename dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/{V7.6_2023.09.22__registration_data.sql => V7.5_2023.09.22__registration_data.sql} (100%) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2023.09.22__registration_data.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.09.22__registration_data.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.09.22__registration_data.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2023.09.22__registration_data.sql similarity index 100% rename from dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.09.22__registration_data.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2023.09.22__registration_data.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2023.09.22__registration_data.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2023.09.22__registration_data.sql new file mode 100644 index 000000000000..91bc76e7e887 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2023.09.22__registration_data.sql @@ -0,0 +1,64 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- 
http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- ALTER table registrationdata +----------------------------------------------------------------------------------- + +DO $$ + BEGIN + if exists (select constraint_name + from information_schema.constraint_column_usage + where TABLE_SCHEMA = 'public' AND TABLE_NAME = 'registrationdata' AND COLUMN_NAME = 'email') then + EXECUTE 'ALTER TABLE registrationdata DROP CONSTRAINT ' || + QUOTE_IDENT(( + SELECT CONSTRAINT_NAME + FROM information_schema.key_column_usage + WHERE TABLE_SCHEMA = 'public' AND TABLE_NAME = 'registrationdata' AND COLUMN_NAME = 'email' + )); + end if; + end +$$; + +ALTER TABLE registrationdata +ADD COLUMN IF NOT EXISTS registration_type VARCHAR(255); + +ALTER TABLE registrationdata +ADD COLUMN IF NOT EXISTS net_id VARCHAR(64); + +CREATE SEQUENCE IF NOT EXISTS registrationdata_metadatavalue_seq START WITH 1 INCREMENT BY 1; + +----------------------------------------------------------------------------------- +-- Creates table registrationdata_metadata +----------------------------------------------------------------------------------- +DO $$ + BEGIN + IF NOT EXISTS (SELECT FROM pg_catalog.pg_tables + WHERE schemaname = 'public' + AND tablename = 'registrationdata_metadata') THEN + CREATE TABLE registrationdata_metadata ( + registrationdata_metadata_id INTEGER NOT NULL, + registrationdata_id INTEGER, + metadata_field_id INTEGER, + text_value TEXT, + CONSTRAINT pk_registrationdata_metadata PRIMARY KEY (registrationdata_metadata_id) + ); + + ALTER TABLE registrationdata_metadata + ADD CONSTRAINT FK_REGISTRATIONDATA_METADATA_ON_METADATA_FIELD + FOREIGN KEY (metadata_field_id) + REFERENCES metadatafieldregistry (metadata_field_id) ON DELETE CASCADE; + + ALTER TABLE registrationdata_metadata + ADD CONSTRAINT FK_REGISTRATIONDATA_METADATA_ON_REGISTRATIONDATA + FOREIGN KEY (registrationdata_id) + REFERENCES registrationdata (registrationdata_id) ON DELETE CASCADE; + + END IF; + END +$$; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.09.22__registration_data.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.09.22__registration_data.sql deleted file mode 100644 index 69e2d34b4b4e..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.09.22__registration_data.sql +++ /dev/null @@ -1,52 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- ALTER table registrationdata ------------------------------------------------------------------------------------ - -DO $$ - BEGIN - EXECUTE 'ALTER TABLE registrationdata DROP CONSTRAINT ' || - QUOTE_IDENT(( - SELECT CONSTRAINT_NAME - FROM information_schema.key_column_usage - WHERE TABLE_SCHEMA = 'public' AND TABLE_NAME = 'registrationdata' AND COLUMN_NAME = 'email' - )); - end -$$; - -ALTER TABLE registrationdata -ADD COLUMN registration_type VARCHAR(255); - -ALTER TABLE registrationdata -ADD COLUMN net_id VARCHAR(64); - -CREATE SEQUENCE IF NOT EXISTS registrationdata_metadatavalue_seq START WITH 1 INCREMENT BY 1; - ------------------------------------------------------------------------------------ --- Creates table 
registrationdata_metadata ------------------------------------------------------------------------------------ - -CREATE TABLE registrationdata_metadata ( - registrationdata_metadata_id INTEGER NOT NULL, - registrationdata_id INTEGER, - metadata_field_id INTEGER, - text_value TEXT, - CONSTRAINT pk_registrationdata_metadata PRIMARY KEY (registrationdata_metadata_id) -); - -ALTER TABLE registrationdata_metadata -ADD CONSTRAINT FK_REGISTRATIONDATA_METADATA_ON_METADATA_FIELD - FOREIGN KEY (metadata_field_id) - REFERENCES metadatafieldregistry (metadata_field_id) ON DELETE CASCADE; - -ALTER TABLE registrationdata_metadata -ADD CONSTRAINT FK_REGISTRATIONDATA_METADATA_ON_REGISTRATIONDATA - FOREIGN KEY (registrationdata_id) - REFERENCES registrationdata (registrationdata_id) ON DELETE CASCADE; From 41660822c2b2dd84bde99467b72229d7a19f9a90 Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Mon, 13 Nov 2023 15:23:15 +0100 Subject: [PATCH 561/686] [DSC-1351] improvement in curation execution --- .../src/main/java/org/dspace/curate/Curator.java | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index 696b7078c460..6992e46efdfb 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -488,14 +488,15 @@ protected boolean doSite(TaskRunner tr, Site site) throws IOException { * @throws SQLException */ protected boolean doCommunity(TaskRunner tr, Community comm) throws IOException, SQLException { - if (!tr.run(comm)) { - return false; - } UUIDIterator subComIter = new UUIDIterator(curationContext(), comm.getSubcommunities(), Community.class); UUIDIterator collectionsIter = new UUIDIterator(curationContext(), comm.getCollections(), Collection.class); + + if (!tr.run(comm)) { + return false; + } while (subComIter.hasNext()) { if (!doCommunity(tr, subComIter.next())) { @@ -530,9 +531,6 @@ protected boolean doCollection(TaskRunner tr, Collection coll) throws IOExceptio Item item = iter.next(); boolean shouldContinue = tr.run(item); context.uncacheEntity(item); - if (txScope.equals(TxScope.OBJECT) && context.isValid()) { - context.commit(); - } if (!shouldContinue) { return false; } @@ -554,7 +552,7 @@ protected boolean doCollection(TaskRunner tr, Collection coll) throws IOExceptio protected void visit(DSpaceObject dso) throws IOException, SQLException { Context curCtx = curationContext(); if (curCtx != null && txScope.equals(TxScope.OBJECT)) { - curCtx.dispatchEvents(); + curCtx.commit(); } } From 366c4381ebef1ad0e915578860339aaa101e08d0 Mon Sep 17 00:00:00 2001 From: mohamed eskander Date: Mon, 13 Nov 2023 17:04:40 +0200 Subject: [PATCH 562/686] [DSC-1105] renaming the sql file --- ...13__align_sequences_DBMSImportFramework2_with_current_ids.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/{V7.4_2023.05.26__align_sequences_DBMSImportFramework2_with_current_ids.sql => V7.6_2023.11.13__align_sequences_DBMSImportFramework2_with_current_ids.sql} (100%) diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.4_2023.05.26__align_sequences_DBMSImportFramework2_with_current_ids.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.11.13__align_sequences_DBMSImportFramework2_with_current_ids.sql similarity index 100% rename from 
dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.4_2023.05.26__align_sequences_DBMSImportFramework2_with_current_ids.sql rename to dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.11.13__align_sequences_DBMSImportFramework2_with_current_ids.sql From 6b99584e963a318608453550446dfce838ffe535 Mon Sep 17 00:00:00 2001 From: Michael Spalti Date: Wed, 8 Nov 2023 16:00:26 -0800 Subject: [PATCH 563/686] Updated IIIF Controller IT to text bitstream and bundle exclusions (cherry picked from commit e92b4b7bfdc08efab9aee9b8f07506273ee2bfcb) --- .../org/dspace/builder/BitstreamBuilder.java | 51 +++++++++++ .../app/rest/iiif/IIIFControllerIT.java | 87 +++++++++++++++++++ 2 files changed, 138 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java index 424833e5cc65..f72ce70cd4dd 100644 --- a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java @@ -17,7 +17,11 @@ import org.dspace.content.BitstreamFormat; import org.dspace.content.Bundle; import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; +import org.dspace.content.service.MetadataValueService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.eperson.Group; @@ -54,6 +58,13 @@ public static BitstreamBuilder createBitstream(Context context, Item item, Input return builder.createInRequestedBundle(context, item, is, bundleName); } + public static BitstreamBuilder createBitstream(Context context, Item item, InputStream is, + String bundleName, boolean iiifEnabled) + throws SQLException, AuthorizeException, IOException { + BitstreamBuilder builder = new BitstreamBuilder(context); + return builder.createInRequestedBundleWithIiifDisabled(context, item, is, bundleName, iiifEnabled); + } + private BitstreamBuilder create(Context context, Item item, InputStream is) throws SQLException, AuthorizeException, IOException { this.context = context; @@ -87,6 +98,41 @@ private BitstreamBuilder createInRequestedBundle(Context context, Item item, Inp return this; } + private BitstreamBuilder createInRequestedBundleWithIiifDisabled(Context context, Item item, InputStream is, + String bundleName, boolean iiifEnabled) + throws SQLException, AuthorizeException, IOException { + this.context = context; + this.item = item; + + Bundle bundle = getBundleByNameAndIiiEnabled(item, bundleName, iiifEnabled); + + bitstream = bitstreamService.create(context, bundle, is); + + return this; + } + + private Bundle getBundleByNameAndIiiEnabled(Item item, String bundleName, boolean iiifEnabled) + throws SQLException, AuthorizeException { + List bundles = itemService.getBundles(item, bundleName); + Bundle targetBundle = null; + + if (bundles.size() < 1) { + // not found, create a new one + targetBundle = bundleService.create(context, item, bundleName); + MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); + MetadataField iiifEnabledField = metadataFieldService. 
+ findByString(context, "dspace.iiif.enabled", '.'); + MetadataValue metadataValue = metadataValueService.create(context, targetBundle, iiifEnabledField); + metadataValue.setValue(String.valueOf(iiifEnabled)); + + } else { + // put bitstreams into first bundle + targetBundle = bundles.iterator().next(); + } + return targetBundle; + } + + private Bundle getBundleByName(Item item, String bundleName) throws SQLException, AuthorizeException { List bundles = itemService.getBundles(item, bundleName); Bundle targetBundle = null; @@ -136,6 +182,11 @@ public BitstreamBuilder withProvenance(String provenance) throws SQLException { } + public BitstreamBuilder withIIIFDisabled() throws SQLException { + bitstreamService.addMetadata(context, bitstream, "dspace", "iiif", "enabled", null, "false"); + return this; + } + public BitstreamBuilder withIIIFLabel(String label) throws SQLException { bitstreamService.addMetadata(context, bitstream, "iiif", "label", null, null, label); return this; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java index b4d1f785d4c9..d17db108bab6 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java @@ -221,6 +221,93 @@ public void findOneIIIFSearchableWithMixedConfigIT() throws Exception { .andExpect(jsonPath("$.service").exists()); } + @Test + public void findOneWithExcludedBitstreamIT() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .enableIIIF() + .build(); + + String bitstreamContent = "ThisIsSomeText"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Bitstream1.jpg") + .withMimeType("image/jpeg") + .withIIIFLabel("Custom Label") + .build(); + } + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFDisabled() + .build(); + } + context.restoreAuthSystemState(); + // Expect canvas label, width and height to match bitstream description. 
+ getClient().perform(get("/iiif/" + publicItem1.getID() + "/manifest")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(1))) + .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) + .andExpect(jsonPath("$.sequences[0].canvases[0].@id", + Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/c0"))) + .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))); + } + + @Test + public void findOneWithExcludedBitstreamBundleIT() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .enableIIIF() + .build(); + + String bitstreamContent = "ThisIsSomeText"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Bitstream1.jpg") + .withMimeType("image/jpeg") + .withIIIFLabel("Custom Label") + .build(); + } + // Add bitstream + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder + .createBitstream(context, publicItem1, is, "ExcludedBundle", false) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .build(); + } + context.restoreAuthSystemState(); + // Expect canvas label, width and height to match bitstream description. + getClient().perform(get("/iiif/" + publicItem1.getID() + "/manifest")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(1))) + .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) + .andExpect(jsonPath("$.sequences[0].canvases[0].@id", + Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/c0"))) + .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))); + } + + @Test public void findOneIIIFSearchableWithCustomBundleAndConfigIT() throws Exception { context.turnOffAuthorisationSystem(); From 74733ab4b4534b6ae0ff78fea2f64027b166ce75 Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Wed, 1 Nov 2023 12:19:25 +0100 Subject: [PATCH 564/686] Simplified the process of fixing the tests after adding new sidebar facets/search filters and sort options to discover.xml (cherry picked from commit b40ad0dfc23040f335d6c6be0fcd0ae6e68a318f) --- .../app/rest/DiscoveryRestControllerIT.java | 671 ++++++++++-------- 1 file changed, 377 insertions(+), 294 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java index a115c8aa2f15..80d8ab2df422 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java @@ -26,6 +26,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; import java.util.UUID; import com.jayway.jsonpath.matchers.JsonPathMatchers; @@ -69,6 +71,7 @@ import org.dspace.supervision.SupervisionOrder; import 
org.dspace.xmlworkflow.storedcomponents.ClaimedTask; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; +import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.Ignore; import org.junit.Test; @@ -85,6 +88,24 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest @Autowired ChoiceAuthorityService choiceAuthorityService; + /** + * This field has been created to easily modify the tests when updating the defaultConfiguration's sidebar facets + */ + List> customSidebarFacets = List.of( + ); + + /** + * This field has been created to easily modify the tests when updating the defaultConfiguration's search filters + */ + List> customSearchFilters = List.of( + ); + + /** + * This field has been created to easily modify the tests when updating the defaultConfiguration's sort fields + */ + List> customSortFields = List.of( + ); + @Test public void rootDiscoverTest() throws Exception { @@ -105,6 +126,14 @@ public void rootDiscoverTest() throws Exception { @Test public void discoverFacetsTestWithoutParameters() throws Exception { + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); //When we call this facets endpoint getClient().perform(get("/api/discover/facets")) @@ -116,13 +145,7 @@ public void discoverFacetsTestWithoutParameters() throws Exception { //There needs to be a self link to this endpoint .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) //We have 4 facets in the default configuration, they need to all be present in the embedded section - .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false))) - ); + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(allExpectedSidebarFacets))); } @Test @@ -266,7 +289,7 @@ public void discoverFacetsAuthorWithAuthorityWithSizeParameter() throws Exceptio .andExpect(status().isOk()) //The type needs to be 'discover' .andExpect(jsonPath("$.type", is("discover"))) - //The name of the facet needs to be seubject, because that's what we called + //The name of the facet needs to be author, because that's what we called .andExpect(jsonPath("$.name", is("author"))) //Because we've constructed such a structure so that we have more than 2 (size) subjects, there // needs to be a next link @@ -1194,6 +1217,34 @@ public void discoverFacetsDateTestWithSearchFilter() throws Exception { @Test public void discoverSearchTest() throws Exception { + List> allExpectedSearchFilters = new ArrayList<>(customSearchFilters); + allExpectedSearchFilters.addAll(List.of( + SearchFilterMatcher.titleFilter(), + SearchFilterMatcher.authorFilter(), + SearchFilterMatcher.subjectFilter(), + SearchFilterMatcher.dateIssuedFilter(), + SearchFilterMatcher.hasContentInOriginalBundleFilter(), + SearchFilterMatcher.hasFileNameInOriginalBundleFilter(), + SearchFilterMatcher.hasFileDescriptionInOriginalBundleFilter(), + SearchFilterMatcher.entityTypeFilter(), + SearchFilterMatcher.isAuthorOfPublicationRelation(), + SearchFilterMatcher.isProjectOfPublicationRelation(), + 
SearchFilterMatcher.isOrgUnitOfPublicationRelation(), + SearchFilterMatcher.isPublicationOfJournalIssueRelation(), + SearchFilterMatcher.isJournalOfPublicationRelation() + )); + + List> allExpectedSortFields = new ArrayList<>(customSortFields); + allExpectedSortFields.addAll(List.of( + SortOptionMatcher.sortOptionMatcher( + "score", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), + SortOptionMatcher.sortOptionMatcher( + "dc.title", DiscoverySortFieldConfiguration.SORT_ORDER.asc.name()), + SortOptionMatcher.sortOptionMatcher( + "dc.date.issued", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), + SortOptionMatcher.sortOptionMatcher( + "dc.date.accessioned", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()) + )); //When calling this root endpoint getClient().perform(get("/api/discover/search")) @@ -1208,32 +1259,9 @@ public void discoverSearchTest() throws Exception { .andExpect(jsonPath("$._links.self.href", containsString("api/discover/search"))) //There needs to be a section where these filters as specified as they're the default filters // given in the configuration - .andExpect(jsonPath("$.filters", containsInAnyOrder( - SearchFilterMatcher.titleFilter(), - SearchFilterMatcher.authorFilter(), - SearchFilterMatcher.subjectFilter(), - SearchFilterMatcher.dateIssuedFilter(), - SearchFilterMatcher.hasContentInOriginalBundleFilter(), - SearchFilterMatcher.hasFileNameInOriginalBundleFilter(), - SearchFilterMatcher.hasFileDescriptionInOriginalBundleFilter(), - SearchFilterMatcher.entityTypeFilter(), - SearchFilterMatcher.isAuthorOfPublicationRelation(), - SearchFilterMatcher.isProjectOfPublicationRelation(), - SearchFilterMatcher.isOrgUnitOfPublicationRelation(), - SearchFilterMatcher.isPublicationOfJournalIssueRelation(), - SearchFilterMatcher.isJournalOfPublicationRelation() - ))) + .andExpect(jsonPath("$.filters", containsInAnyOrder(allExpectedSearchFilters))) //These sortOptions need to be present as it's the default in the configuration - .andExpect(jsonPath("$.sortOptions", contains( - SortOptionMatcher.sortOptionMatcher( - "score", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), - SortOptionMatcher.sortOptionMatcher( - "dc.title", DiscoverySortFieldConfiguration.SORT_ORDER.asc.name()), - SortOptionMatcher.sortOptionMatcher( - "dc.date.issued", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()), - SortOptionMatcher.sortOptionMatcher( - "dc.date.accessioned", DiscoverySortFieldConfiguration.SORT_ORDER.desc.name()) - ))); + .andExpect(jsonPath("$.sortOptions", contains(allExpectedSortFields))); } @Test @@ -1337,6 +1365,14 @@ public void discoverSearchObjectsTest() throws Exception { //** WHEN ** //An anonymous user browses this endpoint to find the objects in the system + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects")) //** THEN ** //The status has to be 200 OK @@ -1363,13 +1399,7 @@ public void discoverSearchObjectsTest() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - 
FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -1473,6 +1503,14 @@ public void discoverSearchObjectsTestHasMoreAuthorFacet() throws Exception { //** WHEN ** //An anonymous user browses this endpoint to find the objects in the system + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(true), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects")) //** THEN ** //The status has to be 200 OK @@ -1502,13 +1540,7 @@ public void discoverSearchObjectsTestHasMoreAuthorFacet() throws Exception { // property because we don't exceed their default limit for a hasMore true (the default is 10) //We do however exceed the limit for the authors, so this property has to be true for the author // facet - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(true), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -1564,7 +1596,15 @@ public void discoverSearchObjectsTestHasMoreSubjectFacet() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(true), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects")) //** THEN ** @@ -1592,13 +1632,7 @@ public void discoverSearchObjectsTestHasMoreSubjectFacet() throws Exception { // property because we don't exceed their default limit for a hasMore true (the default is 10) //We do however exceed the limit for the subject, so this property has to be true for the subject // facet - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(true), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -1645,8 +1679,16 @@ 
public void discoverSearchObjectsTestWithBasicQuery() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a query that says that the title has to contain 'test' + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,contains")) @@ -1666,19 +1708,13 @@ public void discoverSearchObjectsTestWithBasicQuery() throws Exception { SearchResultMatcher.match("core", "item", "items"), SearchResultMatcher.match("core", "item", "items") ))) - //We need to display the appliedFilters object that contains the query that we've ran + //We need to display the appliedFilters object that contains the query that we've run .andExpect(jsonPath("$.appliedFilters", contains( AppliedFilterMatcher.appliedFilterEntry("title", "contains", "test", "test") ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -1751,8 +1787,16 @@ public void discoverSearchObjectsTestWithScope() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a scope 'test' + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("scope", "test")) @@ -1780,13 +1824,7 @@ public void discoverSearchObjectsTestWithScope() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available 
.andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -1835,9 +1873,17 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { context.restoreAuthSystemState(); // ** WHEN ** - // An anonymous user browses this endpoint to find the the objects in the system + // An anonymous user browses this endpoint to find the objects in the system // With dsoType 'item' + List> allExpectedSidebarFacetsWithDsoTypeItem = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacetsWithDsoTypeItem.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("dsoType", "Item")) @@ -1860,17 +1906,20 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", + Matchers.containsInAnyOrder(allExpectedSidebarFacetsWithDsoTypeItem))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))); // With dsoTypes 'community' and 'collection' + List> allExpectedSidebarFacetsWithDsoTypesComCol = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacetsWithDsoTypesComCol.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("dsoType", "Community") .param("dsoType", "Collection")) @@ -1895,17 +1944,21 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", + Matchers.containsInAnyOrder(allExpectedSidebarFacetsWithDsoTypesComCol))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))); // With dsoTypes 'collection' and 'item' + List> allExpectedSidebarFacetsWithDsoTypesColItem = + new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacetsWithDsoTypesColItem.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + 
FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("dsoType", "Collection") .param("dsoType", "Item")) @@ -1931,17 +1984,21 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", + Matchers.containsInAnyOrder(allExpectedSidebarFacetsWithDsoTypesColItem))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))); // With dsoTypes 'community', 'collection' and 'item' + List> allExpectedSidebarFacetsWithDsoTypesComColItem = + new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacetsWithDsoTypesComColItem.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("dsoType", "Community") .param("dsoType", "Collection") @@ -1971,13 +2028,8 @@ public void discoverSearchObjectsTestWithDsoType() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", + Matchers.containsInAnyOrder(allExpectedSidebarFacetsWithDsoTypesComColItem))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))); } @@ -2024,9 +2076,17 @@ public void discoverSearchObjectsTestWithDsoTypeAndSort() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a dsoType 'item' //And a sort on the dc.title ascending + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("dsoType", "Item") .param("sort", "dc.title,ASC")) @@ -2058,13 +2118,7 @@ public void discoverSearchObjectsTestWithDsoTypeAndSort() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a 
hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //We want to get the sort that's been used as well in the response .andExpect(jsonPath("$.sort", is( SortOptionMatcher.sortByAndOrder("dc.title", "ASC") @@ -2246,8 +2300,16 @@ public void discoverSearchObjectsTestForPaginationAndNextLinks() throws Exceptio context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(true), + FacetEntryMatcher.subjectFacet(true), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("size", "2") .param("page", "1")) @@ -2270,13 +2332,7 @@ public void discoverSearchObjectsTestForPaginationAndNextLinks() throws Exceptio SearchResultMatcher.match(), SearchResultMatcher.match() ))) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(true), - FacetEntryMatcher.subjectFacet(true), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.entityTypeFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -2339,8 +2395,16 @@ public void discoverSearchObjectsTestWithContentInABitstream() throws Exception context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a query stating 'ThisIsSomeDummyText' + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("query", "ThisIsSomeDummyText")) @@ -2360,13 +2424,7 @@ public void discoverSearchObjectsTestWithContentInABitstream() throws Exception //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", 
Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -2419,8 +2477,15 @@ public void discoverSearchObjectsTestForEmbargoedItemsAndPrivateItems() throws E //Turn on the authorization again context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system - // + //An anonymous user browses this endpoint to find the objects in the system + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects")) //** THEN ** //The status has to be 200 OK @@ -2450,13 +2515,7 @@ public void discoverSearchObjectsTestForEmbargoedItemsAndPrivateItems() throws E ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -2514,7 +2573,7 @@ public void discoverSearchObjectsTestWithContentInAPrivateBitstream() throws Exc context.restoreAuthSystemState(); context.setCurrentUser(null); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 getClient().perform(get("/api/discover/search/objects") .param("query", "ThisIsSomeDummyText")) @@ -2591,8 +2650,16 @@ public void discoverSearchObjectsTestForScope() throws Exception { UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the scope given + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("scope", String.valueOf(scope))) //** THEN ** @@ -2613,13 +2680,7 @@ public void discoverSearchObjectsTestForScope() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - 
FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -2670,8 +2731,16 @@ public void discoverSearchObjectsTestForScopeWithPrivateItem() throws Exception UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("scope", String.valueOf(scope))) //** THEN ** @@ -2698,13 +2767,7 @@ public void discoverSearchObjectsTestForScopeWithPrivateItem() throws Exception )))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -2856,8 +2919,16 @@ public void discoverSearchObjectsTestForHitHighlights() throws Exception { String query = "Public"; //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a query stating 'public' + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("query", query)) //** THEN ** @@ -2878,13 +2949,7 @@ public void discoverSearchObjectsTestForHitHighlights() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", 
containsString("/api/discover/search/objects"))) ; @@ -2934,7 +2999,7 @@ public void discoverSearchObjectsTestForHitHighlightsWithPrivateItem() throws Ex String query = "Public"; //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a query stating 'Public' getClient().perform(get("/api/discover/search/objects") .param("query", query)) @@ -3000,10 +3065,17 @@ public void discoverSearchObjectsWithQueryOperatorContains_query() throws Except context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "test*,query")) //** THEN ** @@ -3022,13 +3094,7 @@ public void discoverSearchObjectsWithQueryOperatorContains_query() throws Except ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3076,10 +3142,17 @@ public void discoverSearchObjectsWithQueryOperatorContains() throws Exception { context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,contains")) //** THEN ** @@ -3098,14 +3171,9 @@ public void discoverSearchObjectsWithQueryOperatorContains() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - 
FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) - //There always needs to be a self link available + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) + + //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3151,10 +3219,17 @@ public void discoverSearchObjectsWithQueryOperatorNotContains_query() throws Exc context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "-test*,query")) //** THEN ** @@ -3172,13 +3247,7 @@ public void discoverSearchObjectsWithQueryOperatorNotContains_query() throws Exc ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3226,10 +3295,17 @@ public void discoverSearchObjectsWithQueryOperatorNotContains() throws Exception context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,notcontains")) //** THEN ** @@ -3247,13 +3323,8 @@ public void discoverSearchObjectsWithQueryOperatorNotContains() throws Exception ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", 
Matchers.containsInAnyOrder(allExpectedSidebarFacets))) + //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3308,8 +3379,16 @@ public void discoverSearchObjectsTestForMinMaxValues() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacetWithMinMax(true, "Doe, Jane", "Testing, Works"), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(true), + FacetEntryMatcher.dateIssuedFacetWithMinMax(false, "1990-02-13", "2010-10-17"), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("size", "2") .param("page", "1")) @@ -3332,13 +3411,7 @@ public void discoverSearchObjectsTestForMinMaxValues() throws Exception { SearchResultMatcher.match(), SearchResultMatcher.match() ))) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacetWithMinMax(true, "Doe, Jane", "Testing, Works"), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(true), - FacetEntryMatcher.dateIssuedFacetWithMinMax(false, "1990-02-13", "2010-10-17"), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3393,21 +3466,23 @@ public void discoverSearchFacetsTestForMinMaxValues() throws Exception { context.restoreAuthSystemState(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With a size 2 + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacetWithMinMax(true, "Doe, Jane", "Testing, Works"), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(true), + FacetEntryMatcher.dateIssuedFacetWithMinMax(false, "1990-02-13", "2010-10-17"), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/facets")) //** THEN ** //The status has to be 200 OK .andExpect(status().isOk()) //The type has to be 'discover' .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacetWithMinMax(true, "Doe, Jane", "Testing, Works"), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(true), - FacetEntryMatcher.dateIssuedFacetWithMinMax(false, "1990-02-13", "2010-10-17"), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/facets"))) ; @@ -3454,10 +3529,17 @@ public void discoverSearchObjectsWithQueryOperatorEquals_query() throws Exceptio context.restoreAuthSystemState(); - UUID 
scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "Test,query")) //** THEN ** @@ -3475,13 +3557,7 @@ public void discoverSearchObjectsWithQueryOperatorEquals_query() throws Exceptio ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3529,10 +3605,17 @@ public void discoverSearchObjectsWithQueryOperatorEquals() throws Exception { context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "Test,equals")) //** THEN ** @@ -3550,13 +3633,7 @@ public void discoverSearchObjectsWithQueryOperatorEquals() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3603,10 +3680,17 @@ public void discoverSearchObjectsWithQueryOperatorNotEquals_query() throws Excep context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search 
filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "-Test,query")) //** THEN ** @@ -3625,13 +3709,7 @@ public void discoverSearchObjectsWithQueryOperatorNotEquals_query() throws Excep ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3679,10 +3757,17 @@ public void discoverSearchObjectsWithQueryOperatorNotEquals() throws Exception { context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "Test,notequals")) //** THEN ** @@ -3701,13 +3786,7 @@ public void discoverSearchObjectsWithQueryOperatorNotEquals() throws Exception { ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3754,10 +3833,17 @@ public void discoverSearchObjectsWithQueryOperatorNotAuthority_query() throws Ex context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + 
FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "-id:test,query")) //** THEN ** @@ -3775,13 +3861,7 @@ public void discoverSearchObjectsWithQueryOperatorNotAuthority_query() throws Ex ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3829,10 +3909,17 @@ public void discoverSearchObjectsWithQueryOperatorNotAuthority() throws Exceptio context.restoreAuthSystemState(); - UUID scope = col2.getID(); //** WHEN ** - //An anonymous user browses this endpoint to find the the objects in the system + //An anonymous user browses this endpoint to find the objects in the system //With the given search filter + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.entityTypeFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false) + )); getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,notauthority")) //** THEN ** @@ -3850,13 +3937,7 @@ public void discoverSearchObjectsWithQueryOperatorNotAuthority() throws Exceptio ))) //These facets have to show up in the embedded.facets section as well with the given hasMore property // because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.entityTypeFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -3866,7 +3947,7 @@ public void discoverSearchObjectsWithQueryOperatorNotAuthority() throws Exceptio @Test public void discoverSearchObjectsWithMissingQueryOperator() throws Exception { //** WHEN ** - // An anonymous user browses this endpoint to find the the objects in the system + // An anonymous user browses this endpoint to find the objects in the system // With the given search filter where there is the filter operator missing in the value (must be of form // <:filter-value>,<:filter-operator>) getClient().perform(get("/api/discover/search/objects") @@ -3879,10 +3960,10 @@ public void discoverSearchObjectsWithMissingQueryOperator() throws Exception { @Test public void discoverSearchObjectsWithNotValidQueryOperator() throws Exception { //** WHEN ** - // An 
anonymous user browses this endpoint to find the the objects in the system + // An anonymous user browses this endpoint to find the objects in the system // With the given search filter where there is a non-valid filter operator given (must be of form // <:filter-value>,<:filter-operator> where the filter operator is one of: “contains”, “notcontains”, "equals" - // “notequals”, “authority”, “notauthority”, "query”); see enum RestSearchOperator + // “notequals”, “authority”, “notauthority”, "query"); see enum RestSearchOperator getClient().perform(get("/api/discover/search/objects") .param("f.title", "test,operator")) //** THEN ** @@ -4180,8 +4261,8 @@ public void discoverSearchObjectsTestWithUnEscapedLuceneCharactersTest() throws @Test /** - * This test is intent to verify that inprogress submission (workspaceitem, workflowitem, pool task and claimed - * tasks) don't interfers with the standard search + * This test is intended to verify that an in progress submission (workspaceitem, workflowitem, pool task and + * claimed tasks) don't interfere with the standard search * * @throws Exception */ @@ -4231,7 +4312,7 @@ public void discoverSearchObjectsWithInProgressSubmissionTest() throws Exception .withSubject("ExtraEntry") .build(); - //3. three inprogress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) + //3. three in progress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) context.setCurrentUser(eperson); WorkspaceItem wsItem1 = WorkspaceItemBuilder.createWorkspaceItem(context, col1).withTitle("Workspace Item 1") .build(); @@ -4246,7 +4327,7 @@ public void discoverSearchObjectsWithInProgressSubmissionTest() throws Exception ClaimedTask cTask = ClaimedTaskBuilder.createClaimedTask(context, col2, admin).withTitle("Claimed Item") .build(); - // 5. other inprogress submissions made by the administrator + // 5. 
other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withTitle("Admin Workspace Item 1").build(); @@ -4261,7 +4342,15 @@ public void discoverSearchObjectsWithInProgressSubmissionTest() throws Exception //** WHEN ** // An anonymous user, the submitter and the admin that browse this endpoint to find the public objects in the - // system should not retrieve the inprogress submissions and related objects + // system should not retrieve the in progress submissions and related objects + List> allExpectedSidebarFacets = new ArrayList<>(customSidebarFacets); + allExpectedSidebarFacets.addAll(List.of( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )); String[] tokens = new String[] { null, getAuthToken(eperson.getEmail(), password), @@ -4297,13 +4386,7 @@ public void discoverSearchObjectsWithInProgressSubmissionTest() throws Exception ))) //These facets have to show up in the embedded.facets section as well with the given hasMore // property because we don't exceed their default limit for a hasMore true (the default is 10) - .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder( - FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.subjectFacet(false), - FacetEntryMatcher.dateIssuedFacet(false), - FacetEntryMatcher.hasContentInOriginalBundleFacet(false), - FacetEntryMatcher.entityTypeFacet(false) - ))) + .andExpect(jsonPath("$._embedded.facets", Matchers.containsInAnyOrder(allExpectedSidebarFacets))) //There always needs to be a self link .andExpect(jsonPath("$._links.self.href", containsString("/api/discover/search/objects"))) ; @@ -4366,7 +4449,7 @@ public void discoverSearchObjectsWorkspaceConfigurationTest() throws Exception { .withSubject("ExtraEntry") .build(); - //3. three inprogress submission from our submitter user (2 ws, 1 wf that will produce also a pooltask) + //3. three in progress submission from our submitter user (2 ws, 1 wf that will produce also a pooltask) WorkspaceItem wsItem1 = WorkspaceItemBuilder.createWorkspaceItem(context, col1).withTitle("Workspace Item 1") .withIssueDate("2010-07-23") .build(); @@ -4384,7 +4467,7 @@ public void discoverSearchObjectsWorkspaceConfigurationTest() throws Exception { .withIssueDate("2010-11-03") .build(); - // 5. other inprogress submissions made by the administrator + // 5. other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withIssueDate("2010-07-23") @@ -4568,7 +4651,7 @@ public void discoverSearchObjectsWorkflowConfigurationTest() throws Exception { .withSubject("ExtraEntry") .build(); - //3. three inprogress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) + //3. three in progress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) context.setCurrentUser(eperson); WorkspaceItem wsItem1 = WorkspaceItemBuilder.createWorkspaceItem(context, col1).withTitle("Workspace Item 1") .withIssueDate("2010-07-23") @@ -4587,7 +4670,7 @@ public void discoverSearchObjectsWorkflowConfigurationTest() throws Exception { .withIssueDate("2010-11-03") .build(); - // 5. other inprogress submissions made by the administrator + // 5. 
other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withIssueDate("2010-07-23") @@ -4601,7 +4684,7 @@ public void discoverSearchObjectsWorkflowConfigurationTest() throws Exception { .withIssueDate("2010-11-03") .withTitle("Admin Workflow Item 1").build(); - // 6. a pool taks in the second step of the workflow + // 6. a pool task in the second step of the workflow ClaimedTask cTask2 = ClaimedTaskBuilder.createClaimedTask(context, col2, admin).withTitle("Pool Step2 Item") .withIssueDate("2010-11-04") .build(); @@ -4628,7 +4711,7 @@ public void discoverSearchObjectsWorkflowConfigurationTest() throws Exception { // 1 pool task in step 1, submitted by the same regular submitter // 1 pool task in step 1, submitted by the admin // 1 claimed task in the first workflow step from the repository admin - // 1 pool task task in step 2, from the repository admin + // 1 pool task in step 2, from the repository admin // (This one is created by creating a claimed task for step 1 and approving it) //** WHEN ** @@ -4838,7 +4921,7 @@ public void discoverSearchObjectsWorkflowAdminConfigurationTest() throws Excepti .withSubject("ExtraEntry") .build(); - //3. three inprogress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) + //3. three in progress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) context.setCurrentUser(eperson); WorkspaceItem wsItem1 = WorkspaceItemBuilder.createWorkspaceItem(context, col1).withTitle("Workspace Item 1") .withIssueDate("2010-07-23") @@ -4857,7 +4940,7 @@ public void discoverSearchObjectsWorkflowAdminConfigurationTest() throws Excepti .withIssueDate("2010-11-03") .build(); - // 5. other inprogress submissions made by the administrator + // 5. other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withIssueDate("2010-07-23") @@ -4871,7 +4954,7 @@ public void discoverSearchObjectsWorkflowAdminConfigurationTest() throws Excepti .withIssueDate("2010-11-03") .withTitle("Admin Workflow Item 1").build(); - // 6. a pool taks in the second step of the workflow + // 6. a pool task in the second step of the workflow ClaimedTask cTask2 = ClaimedTaskBuilder.createClaimedTask(context, col2, admin).withTitle("Pool Step2 Item") .withIssueDate("2010-11-04") .build(); @@ -4898,7 +4981,7 @@ public void discoverSearchObjectsWorkflowAdminConfigurationTest() throws Excepti // 1 pool task in step 1, submitted by the same regular submitter // 1 pool task in step 1, submitted by the admin // 1 claimed task in the first workflow step from the repository admin - // 1 pool task task in step 2, from the repository admin + // 1 pool task in step 2, from the repository admin // (This one is created by creating a claimed task for step 1 and approving it) //** WHEN ** @@ -6586,7 +6669,7 @@ public void discoverSearchObjectsSupervisionConfigurationTest() throws Exception .withSubject("ExtraEntry") .build(); - //3. three inprogress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) + //3. 
three in progress submission from a normal user (2 ws, 1 wf that will produce also a pooltask) context.setCurrentUser(eperson); WorkspaceItem wsItem1 = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withTitle("Workspace Item 1") @@ -6616,7 +6699,7 @@ public void discoverSearchObjectsSupervisionConfigurationTest() throws Exception .withIssueDate("2010-11-03") .build(); - // 5. other inprogress submissions made by the administrator + // 5. other in progress submissions made by the administrator context.setCurrentUser(admin); WorkspaceItem wsItem1Admin = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withIssueDate("2010-07-23") From e7786f49afefc031363afef9772eb7184187b633 Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Wed, 1 Nov 2023 00:43:17 +0100 Subject: [PATCH 565/686] 107671: Expose the handle.canonical.prefix to the frontend (cherry picked from commit 6d86e65b720b5108e94b1df85e6038394c183214) --- dspace/config/modules/rest.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/dspace/config/modules/rest.cfg b/dspace/config/modules/rest.cfg index b08f8d514536..537eedbd087b 100644 --- a/dspace/config/modules/rest.cfg +++ b/dspace/config/modules/rest.cfg @@ -54,6 +54,7 @@ rest.properties.exposed = google.recaptcha.mode rest.properties.exposed = cc.license.jurisdiction rest.properties.exposed = identifiers.item-status.register-doi rest.properties.exposed = authentication-password.domain.valid +rest.properties.exposed = handle.canonical.prefix #---------------------------------------------------------------# # These configs are used by the deprecated REST (v4-6) module # From a7f106da926bce170da9e0284898a9aa41a6e06c Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Wed, 1 Nov 2023 00:47:31 +0100 Subject: [PATCH 566/686] Remove line breaks from default.license because they are being rendered in the frontend (cherry picked from commit 56aae347c2a7012af912a8893142fc04809e7ff3) --- dspace/config/default.license | 44 +++++++++++------------------------ 1 file changed, 13 insertions(+), 31 deletions(-) diff --git a/dspace/config/default.license b/dspace/config/default.license index 0b5b3cb4b8f1..390e9786688d 100644 --- a/dspace/config/default.license +++ b/dspace/config/default.license @@ -3,34 +3,16 @@ This sample license is provided for informational purposes only. NON-EXCLUSIVE DISTRIBUTION LICENSE -By signing and submitting this license, you (the author(s) or copyright -owner) grants to DSpace University (DSU) the non-exclusive right to reproduce, -translate (as defined below), and/or distribute your submission (including -the abstract) worldwide in print and electronic format and in any medium, -including but not limited to audio or video. - -You agree that DSU may, without changing the content, translate the -submission to any medium or format for the purpose of preservation. - -You also agree that DSU may keep more than one copy of this submission for -purposes of security, back-up and preservation. - -You represent that the submission is your original work, and that you have -the right to grant the rights contained in this license. You also represent -that your submission does not, to the best of your knowledge, infringe upon -anyone's copyright. 
- -If the submission contains material for which you do not hold copyright, -you represent that you have obtained the unrestricted permission of the -copyright owner to grant DSU the rights required by this license, and that -such third-party owned material is clearly identified and acknowledged -within the text or content of the submission. - -IF THE SUBMISSION IS BASED UPON WORK THAT HAS BEEN SPONSORED OR SUPPORTED -BY AN AGENCY OR ORGANIZATION OTHER THAN DSU, YOU REPRESENT THAT YOU HAVE -FULFILLED ANY RIGHT OF REVIEW OR OTHER OBLIGATIONS REQUIRED BY SUCH -CONTRACT OR AGREEMENT. - -DSU will clearly identify your name(s) as the author(s) or owner(s) of the -submission, and will not make any alteration, other than as allowed by this -license, to your submission. +By signing and submitting this license, you (the author(s) or copyright owner) grants to DSpace University (DSU) the non-exclusive right to reproduce, translate (as defined below), and/or distribute your submission (including the abstract) worldwide in print and electronic format and in any medium, including but not limited to audio or video. + +You agree that DSU may, without changing the content, translate the submission to any medium or format for the purpose of preservation. + +You also agree that DSU may keep more than one copy of this submission for purposes of security, back-up and preservation. + +You represent that the submission is your original work, and that you have the right to grant the rights contained in this license. You also represent that your submission does not, to the best of your knowledge, infringe upon anyone's copyright. + +If the submission contains material for which you do not hold copyright, you represent that you have obtained the unrestricted permission of the copyright owner to grant DSU the rights required by this license, and that such third-party owned material is clearly identified and acknowledged within the text or content of the submission. + +IF THE SUBMISSION IS BASED UPON WORK THAT HAS BEEN SPONSORED OR SUPPORTED BY AN AGENCY OR ORGANIZATION OTHER THAN DSU, YOU REPRESENT THAT YOU HAVE FULFILLED ANY RIGHT OF REVIEW OR OTHER OBLIGATIONS REQUIRED BY SUCH CONTRACT OR AGREEMENT. + +DSU will clearly identify your name(s) as the author(s) or owner(s) of the submission, and will not make any alteration, other than as allowed by this license, to your submission. 
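Note on the default.license change above: the patch flattens each paragraph onto a single line because the frontend renders the file verbatim, so the old hard wraps showed up as broken lines. If the same reflow is ever needed for another bundled text file, the transformation is mechanical: join consecutive non-blank lines with a space and keep blank lines as paragraph separators. The sketch below is illustrative only and is not part of the patch (the patch edits the file by hand); the LicenseReflow class name and the sample text in main() are made up for the example.

import java.util.ArrayList;
import java.util.List;

public class LicenseReflow {

    // Collapse hard-wrapped lines into single-line paragraphs,
    // preserving blank lines as paragraph separators.
    public static String reflow(String text) {
        StringBuilder out = new StringBuilder();
        StringBuilder paragraph = new StringBuilder();
        for (String line : text.split("\n", -1)) {
            if (line.isBlank()) {
                // a blank line ends the current paragraph
                if (paragraph.length() > 0) {
                    out.append(paragraph).append("\n");
                    paragraph.setLength(0);
                }
                out.append("\n");
            } else {
                // join wrapped lines with a single space
                if (paragraph.length() > 0) {
                    paragraph.append(' ');
                }
                paragraph.append(line.trim());
            }
        }
        if (paragraph.length() > 0) {
            out.append(paragraph).append("\n");
        }
        return out.toString();
    }

    public static void main(String[] args) {
        String wrapped = "You agree that DSU may, without changing the content,\n"
            + "translate the submission to any medium or format\n"
            + "for the purpose of preservation.\n"
            + "\n"
            + "You also agree that DSU may keep more than one copy.\n";
        System.out.print(reflow(wrapped));
    }
}
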
From 31ee8712a72f4714a5dcaab735ce5e953e24ec46 Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Tue, 14 Nov 2023 08:35:23 +0100 Subject: [PATCH 567/686] [DSC-1351] checkstyle fix --- dspace-api/src/main/java/org/dspace/curate/Curator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index 6992e46efdfb..dc160c336d30 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -493,7 +493,7 @@ protected boolean doCommunity(TaskRunner tr, Community comm) throws IOException, UUIDIterator collectionsIter = new UUIDIterator(curationContext(), comm.getCollections(), Collection.class); - + if (!tr.run(comm)) { return false; } From f5705e64e55f28548722b3b68ae291c316de2190 Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Tue, 14 Nov 2023 11:27:07 +0100 Subject: [PATCH 568/686] [DSC-1351] fixed IT failing for curation tasks --- .../src/test/java/org/dspace/curate/CurationScriptIT.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java index a528f4351356..30f7f99857f9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java @@ -77,6 +77,7 @@ public void curateScript_invalidTaskOption() throws Exception { parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); parameters.add(new DSpaceCommandLineParameter("-t", "invalidTaskOption")); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); List list = parameters.stream() .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter @@ -100,6 +101,7 @@ public void curateScript_MissingHandle() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0))); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); List list = parameters.stream() .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter @@ -121,6 +123,7 @@ public void curateScript_invalidHandle() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-i", "invalidhandle")); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0))); List list = parameters.stream() @@ -160,6 +163,7 @@ public void curateScript_MissingTaskOrTaskFile() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); List list = parameters.stream() .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter @@ -205,6 +209,7 @@ public void curateScript_InvalidTaskFile() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-i", "all")); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-T", "invalidTaskFile")); List list = parameters.stream() @@ -245,6 +250,7 @@ public void curateScript_validRequest_Task() throws Exception { LinkedList parameters = new LinkedList<>(); 
parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0))); List list = parameters.stream() @@ -296,6 +302,7 @@ public void curateScript_validRequest_TaskFile() throws Exception { LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-T", taskFile.getAbsolutePath())); List list = parameters.stream() @@ -346,6 +353,7 @@ public void curateScript_EPersonInParametersFails() throws Exception { parameters.add(new DSpaceCommandLineParameter("-e", eperson.getEmail())); parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle())); + parameters.add(new DSpaceCommandLineParameter("-s", "open")); parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0))); List list = parameters.stream() From ed9d13f771409424d949adceb88d255d11a06b21 Mon Sep 17 00:00:00 2001 From: Alexander K Date: Tue, 14 Nov 2023 16:33:31 +0100 Subject: [PATCH 569/686] Revert "[DSC-1189] change pattern for get thumbnails" This reverts commit f687063588d3ea722750909b5a7bd1d2a846c5ed. --- .../src/main/java/org/dspace/content/BitstreamServiceImpl.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 0b7ce4b097cf..b07f23ee23ff 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -416,8 +416,7 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + - (bitstream.getName() != null ? 
Pattern.quote(bitstream.getName()) : bitstream.getName()) + ".([^.]+)$"); + Pattern pattern = Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); for (Bundle bundle : bitstream.getBundles()) { for (Item item : bundle.getItems()) { From ba10597d35a8d6e788430d44f04576b48ba7e586 Mon Sep 17 00:00:00 2001 From: Alexander K Date: Tue, 14 Nov 2023 17:22:39 +0100 Subject: [PATCH 570/686] [DSC-1189] update from issues #9113 --- .../dspace/content/BitstreamServiceImpl.java | 2 +- .../app/rest/BitstreamRestRepositoryIT.java | 47 +++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index b07f23ee23ff..51bf60c38fea 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -416,7 +416,7 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); + Pattern pattern = Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$"); for (Bundle bundle : bitstream.getBundles()) { for (Item item : bundle.getItems()) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index b850d973e4f3..68cc61c8a8c8 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -1742,6 +1742,53 @@ public void thumbnailEndpointTest() throws Exception { .andExpect(jsonPath("$.type", is("bitstream"))); } + @Test + public void thumbnailEndpointTestWithSpecialCharactersInFileName() throws Exception { + // Given an Item + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1").build(); + + Item item = ItemBuilder.createItem(context, col1) + .withTitle("Test item -- thumbnail") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .build(); + + Bundle originalBundle = BundleBuilder.createBundle(context, item) + .withName(Constants.DEFAULT_BUNDLE_NAME) + .build(); + Bundle thumbnailBundle = BundleBuilder.createBundle(context, item) + .withName("THUMBNAIL") + .build(); + + InputStream is = IOUtils.toInputStream("dummy", "utf-8"); + + // With an ORIGINAL Bitstream & matching THUMBNAIL Bitstream containing special characters in filenames + Bitstream bitstream = BitstreamBuilder.createBitstream(context, originalBundle, is) + .withName("test (2023) file.pdf") + .withMimeType("application/pdf") + .build(); + Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, is) + .withName("test (2023) file.pdf.jpg") + .withMimeType("image/jpeg") + .build(); + + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/thumbnail")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.uuid", 
Matchers.is(thumbnail.getID().toString()))) + .andExpect(jsonPath("$.type", is("bitstream"))); + } + @Test public void thumbnailEndpointMultipleThumbnailsWithPrimaryBitstreamTest() throws Exception { // Given an Item From 821dd512be66d46e46288de4b17f809fe58b309a Mon Sep 17 00:00:00 2001 From: Alexander K Date: Tue, 14 Nov 2023 17:24:38 +0100 Subject: [PATCH 571/686] [DSC-1189] update from issues #9113 --- .../main/java/org/dspace/content/BitstreamServiceImpl.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 51bf60c38fea..d5e198778a45 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -416,7 +416,9 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$"); + Pattern pattern = Pattern.compile("^" + + (bitstream.getName() != null ? Pattern.quote(bitstream.getName()) : bitstream.getName()) + + ".([^.]+)$"); for (Bundle bundle : bitstream.getBundles()) { for (Item item : bundle.getItems()) { From dbbb7d0911790dcaff7617ec9b7e578a42e395e2 Mon Sep 17 00:00:00 2001 From: Alexander K Date: Tue, 14 Nov 2023 17:25:41 +0100 Subject: [PATCH 572/686] [DSC-1189] update from issues #9113 --- .../java/org/dspace/content/BitstreamServiceImpl.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index d5e198778a45..9eb9145f277b 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -416,9 +416,7 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + - (bitstream.getName() != null ? 
Pattern.quote(bitstream.getName()) : bitstream.getName()) - + ".([^.]+)$"); + Pattern pattern = getBitstreamNamePattern(bitstream); for (Bundle bundle : bitstream.getBundles()) { for (Item item : bundle.getItems()) { @@ -448,6 +446,13 @@ public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLEx return null; } + protected Pattern getBitstreamNamePattern(Bitstream bitstream) { + if (bitstream.getName() != null) { + return Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$"); + } + return Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); + } + @Override public BitstreamFormat getFormat(Context context, Bitstream bitstream) throws SQLException { if (bitstream.getBitstreamFormat() == null) { From 0aef9c7818e4da1321a972b41c3d160447efa2c8 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 14 Nov 2023 18:52:01 +0100 Subject: [PATCH 573/686] [DSC-1053] Introduces send and forget behavior for viewevents --- .../dspace/content/BitstreamServiceImpl.java | 4 ++ .../org/dspace/content/BundleServiceImpl.java | 4 ++ .../dspace/content/CollectionServiceImpl.java | 4 ++ .../dspace/content/CommunityServiceImpl.java | 5 ++ .../org/dspace/content/ItemServiceImpl.java | 4 ++ .../org/dspace/content/SiteServiceImpl.java | 4 ++ .../content/service/DSpaceObjectService.java | 2 + .../org/dspace/core/AbstractHibernateDAO.java | 46 ++++++++++++++++++- .../main/java/org/dspace/core/GenericDAO.java | 11 +++++ .../dspace/eperson/EPersonServiceImpl.java | 4 ++ .../org/dspace/eperson/GroupServiceImpl.java | 4 ++ .../java/org/dspace/usage/UsageEvent.java | 14 ++++++ .../repository/ViewEventRestRepository.java | 14 +++--- .../org/dspace/services/EventService.java | 4 ++ .../services/events/SystemEventService.java | 9 ++++ 15 files changed, 125 insertions(+), 8 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index b07f23ee23ff..b399a27cde1a 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -533,6 +533,10 @@ public List findByItemAndBundleAndMetadata(Context context, Item item } + public boolean exists(Context context, UUID id) throws SQLException { + return this.bitstreamDAO.exists(context, Bitstream.class, id); + } + private boolean isContainedInBundleNamed(Bitstream bitstream, String name) { if (StringUtils.isEmpty(name)) { diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 485f1d645130..8728968e79a4 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -544,4 +544,8 @@ public Bundle findByLegacyId(Context context, int id) throws SQLException { public int countTotal(Context context) throws SQLException { return bundleDAO.countRows(context); } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.bundleDAO.exists(context, Bundle.class, id); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index 367c7a5d34b1..b10567b070e4 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -1217,4 
+1217,8 @@ public int countCollectionsAdministeredByEntityType(String query, String entityT discoverQuery, query, entityType).getTotalSearchResults(); } + public boolean exists(Context context, UUID id) throws SQLException { + return this.collectionDAO.exists(context, Collection.class, id); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java index b4053a724f32..022d1291dbb2 100644 --- a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java @@ -712,4 +712,9 @@ public Community findByLegacyId(Context context, int id) throws SQLException { public int countTotal(Context context) throws SQLException { return communityDAO.countRows(context); } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.communityDAO.exists(context, Community.class, id); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index cfad7f87dba5..ed44e86f5585 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -2144,4 +2144,8 @@ public void addResourcePolicy(Context context, Item item, int actionID, EPerson item.getResourcePolicies().add(resourcePolicy); } + public boolean exists(Context context, UUID id) throws SQLException { + return this.itemDAO.exists(context, Item.class, id); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/SiteServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/SiteServiceImpl.java index 2f53ed0928a2..2b7aa368a5f1 100644 --- a/dspace-api/src/main/java/org/dspace/content/SiteServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/SiteServiceImpl.java @@ -105,4 +105,8 @@ public void delete(Context context, Site dso) throws SQLException, AuthorizeExce public int getSupportsTypeConstant() { return Constants.SITE; } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.siteDAO.exists(context, Site.class, id); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java b/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java index 968768681821..aa2911edfb7c 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java @@ -584,4 +584,6 @@ default void addAndShiftRightSecuredMetadata(Context context, T dso, String sche throws SQLException { } + + boolean exists(Context context, UUID id) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java index 32ad747d765e..b60b94667794 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java @@ -7,16 +7,23 @@ */ package org.dspace.core; +import java.lang.reflect.Field; import java.sql.SQLException; +import java.util.Arrays; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.UUID; import java.util.stream.Stream; +import javax.persistence.Column; +import javax.persistence.Id; import 
javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Expression; +import javax.persistence.criteria.Path; import javax.persistence.criteria.Root; import com.google.common.collect.AbstractIterator; @@ -34,7 +41,6 @@ public abstract class AbstractHibernateDAO implements GenericDAO { protected AbstractHibernateDAO() { - } @Override @@ -95,6 +101,44 @@ public T findByID(Context context, Class clazz, UUID id) throws SQLException { return result; } + public static List getAllFields(List fields, Class type) { + fields.addAll(Arrays.asList(type.getDeclaredFields())); + + if (type.getSuperclass() != null) { + getAllFields(fields, type.getSuperclass()); + } + + return fields; + } + + @Override + public boolean exists(Context context, Class clazz, UUID id) throws SQLException { + if (id == null) { + return false; + } + Optional optionalField = + getAllFields(new LinkedList<>(), clazz) + .stream() + .filter(field -> field.isAnnotationPresent(Id.class) && field.isAnnotationPresent(Column.class)) + .findFirst(); + if (optionalField.isEmpty()) { + return false; + } + + Field idField = optionalField.get(); + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, clazz); + + Root root = criteriaQuery.from(clazz); + Path idColumn = root.get(idField.getName()); + criteriaQuery.select(idColumn); + criteriaQuery.where(criteriaBuilder.equal(idColumn, id)); + + org.hibernate.query.Query query = getHibernateSession(context).createQuery(criteriaQuery); + query.setMaxResults(1); + return query.getSingleResult() != null; + } + @Override public T findByID(Context context, Class clazz, int id) throws SQLException { @SuppressWarnings("unchecked") diff --git a/dspace-api/src/main/java/org/dspace/core/GenericDAO.java b/dspace-api/src/main/java/org/dspace/core/GenericDAO.java index a04a0ccbdcc8..bba0281b119c 100644 --- a/dspace-api/src/main/java/org/dspace/core/GenericDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/GenericDAO.java @@ -91,6 +91,17 @@ public interface GenericDAO { */ public T findByID(Context context, Class clazz, int id) throws SQLException; + /** + * Checks if a given id of a target entity with the clazz type exists in the database. + * + * @param context current DSpace context + * @param clazz entity class + * @param id identifier of the entity + * @return true if found, false otherwise + * @throws SQLException + */ + boolean exists(Context context, Class clazz, UUID id) throws SQLException; + /** * Fetch the entity identified by its UUID primary key. 
* diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index cf62c805b12c..2d213f254f04 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -609,4 +609,8 @@ private List getDSpaceObjectOwnerMetadataValues(Item item) { public String getName(EPerson dso) { return dso.getName(); } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.ePersonDAO.exists(context, EPerson.class, id); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index 9fda372b4f2a..889145f42054 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -840,4 +840,8 @@ public List findByMetadataField(final Context context, final String searc public String getName(Group dso) { return dso.getName(); } + + public boolean exists(Context context, UUID id) throws SQLException { + return this.groupDAO.exists(context, Group.class, id); + } } diff --git a/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java b/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java index ed137e9d6d8c..7c04fc0af552 100644 --- a/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java +++ b/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java @@ -7,9 +7,12 @@ */ package org.dspace.usage; +import java.sql.SQLException; +import java.util.UUID; import javax.servlet.http.HttpServletRequest; import org.dspace.content.DSpaceObject; +import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.services.model.Event; @@ -19,6 +22,17 @@ */ public class UsageEvent extends Event { + public static final UsageEvent createUsageEvent( + final Context context, final HttpServletRequest req, + final DSpaceObjectService dSpaceObjectService, final UUID targetId + ) { + try { + return new UsageEvent(UsageEvent.Action.VIEW, req, context, dSpaceObjectService.find(context, targetId)); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + public static enum Action { VIEW("view"), CREATE("create"), diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java index e4214a4c9208..3231a52bd959 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java @@ -12,6 +12,7 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.UUID; import javax.servlet.ServletInputStream; import javax.servlet.http.HttpServletRequest; @@ -21,7 +22,6 @@ import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.ViewEventRest; import org.dspace.authorize.AuthorizeException; -import org.dspace.content.DSpaceObject; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Constants; @@ -51,7 +51,8 @@ public ViewEventRest createViewEvent() throws AuthorizeException, SQLException { } catch (IOException e1) { throw new 
UnprocessableEntityException("Error parsing request body", e1); } - if (viewEventRest.getTargetId() == null || StringUtils.isBlank(viewEventRest.getTargetType()) || + final UUID targetId = viewEventRest.getTargetId(); + if (targetId == null || StringUtils.isBlank(viewEventRest.getTargetType()) || !typeList.contains(viewEventRest.getTargetType().toUpperCase())) { throw new DSpaceBadRequestException("The given ViewEvent was invalid, one or more properties are either" + " wrong or missing"); @@ -59,13 +60,12 @@ public ViewEventRest createViewEvent() throws AuthorizeException, SQLException { DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance().getDSpaceObjectService( Constants.getTypeID(viewEventRest.getTargetType().toUpperCase(Locale.getDefault()))); - DSpaceObject dSpaceObject = dSpaceObjectService.find(context, viewEventRest.getTargetId()); - if (dSpaceObject == null) { + if (!dSpaceObjectService.exists(context, targetId)) { throw new UnprocessableEntityException( - "The given targetId does not resolve to a DSpaceObject: " + viewEventRest.getTargetId()); + "The given targetId does not resolve to a DSpaceObject: " + targetId); } - UsageEvent usageEvent = new UsageEvent(UsageEvent.Action.VIEW, req, context, dSpaceObject); - eventService.fireEvent(usageEvent); + eventService.handleObjectEvent(() -> UsageEvent.createUsageEvent(context, req, dSpaceObjectService, targetId)); return viewEventRest; } + } diff --git a/dspace-services/src/main/java/org/dspace/services/EventService.java b/dspace-services/src/main/java/org/dspace/services/EventService.java index 92080f0358b0..46f21c640b95 100644 --- a/dspace-services/src/main/java/org/dspace/services/EventService.java +++ b/dspace-services/src/main/java/org/dspace/services/EventService.java @@ -7,6 +7,8 @@ */ package org.dspace.services; +import java.util.function.Supplier; + import org.dspace.services.model.Event; import org.dspace.services.model.EventListener; @@ -34,4 +36,6 @@ public interface EventService { */ public void registerEventListener(EventListener listener); + void handleObjectEvent(Supplier eventSupplier); + } diff --git a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java index 1787c688f6a1..eafc55bafe76 100644 --- a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java +++ b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java @@ -10,6 +10,9 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.function.Supplier; import javax.annotation.PreDestroy; import org.apache.commons.lang3.ArrayUtils; @@ -42,6 +45,8 @@ public final class SystemEventService implements EventService { private final RequestService requestService; private EventRequestInterceptor requestInterceptor; + private final ExecutorService executorService = Executors.newFixedThreadPool(2); + @Autowired(required = true) public SystemEventService(RequestService requestService) { if (requestService == null) { @@ -83,6 +88,10 @@ public void fireEvent(Event event) { } } + public void handleObjectEvent(Supplier eventSupplier) { + this.executorService.submit(() -> this.fireEvent(eventSupplier.get())); + } + /* (non-Javadoc) * @see org.dspace.services.EventService#registerEventListener(org.dspace.services.model.EventListener) */ From 
64d30e6fdf41c237bd7aa976e4592d5669a6a506 Mon Sep 17 00:00:00 2001 From: DSpace Bot <68393067+dspace-bot@users.noreply.github.com> Date: Tue, 14 Nov 2023 14:19:50 -0600 Subject: [PATCH 574/686] [Port dspace-7_x] subscription email: do not send email if nothing has changed (#9204) * improved subscriptions email template (cherry picked from commit 6e7b32795930ea3c0758875c654e95b8602cf9b3) * do not send emails without content (cherry picked from commit 926b2421302587d69318f208b48e334cab57b204) * fixed coding style violations (cherry picked from commit fdacec08df8f930ff7c45745a20297a10eff3ad2) * removed unnecessary isEmpty check as suggested by reviewer (cherry picked from commit 30a837f85403332c31761880a0ed936996ba1b5a) * moved null check on indexableObjects in generateBodyMail (cherry picked from commit b43c340b182a62ddc51e3fd7e294101f6d355e90) * fixed unhandled IOException (cherry picked from commit 9b3f7b698c3efbff165fb727ac91609dbe569218) * fixed typo in bodyCommunities (cherry picked from commit ac3d02eb1ce83f9e1fee55f69f1c0fd2c0e03332) * do not use != to compare strings (cherry picked from commit e46018333508215fce7225b75a5af50d4d7beb59) * fixed improper handling of empty list (cherry picked from commit ac72aae44b5ce49ec1a8ddfa2b7986d0d580a8ac) --------- Co-authored-by: Sascha Szott --- .../subscriptions/ContentGenerator.java | 34 +++++++++++-------- dspace/config/emails/subscriptions_content | 16 +++++---- 2 files changed, 29 insertions(+), 21 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java index a913f2504a50..c3035614343b 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java @@ -56,8 +56,16 @@ public void notifyForSubscriptions(Context context, EPerson ePerson, Locale supportedLocale = I18nUtil.getEPersonLocale(ePerson); Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "subscriptions_content")); email.addRecipient(ePerson.getEmail()); - email.addArgument(generateBodyMail(context, indexableComm)); - email.addArgument(generateBodyMail(context, indexableColl)); + + String bodyCommunities = generateBodyMail(context, indexableComm); + String bodyCollections = generateBodyMail(context, indexableColl); + if (bodyCommunities.equals(EMPTY) && bodyCollections.equals(EMPTY)) { + log.debug("subscription(s) of eperson {} do(es) not match any new items: nothing to send" + + " - exit silently", ePerson::getID); + return; + } + email.addArgument(bodyCommunities); + email.addArgument(bodyCollections); email.send(); } } catch (Exception e) { @@ -67,21 +75,19 @@ public void notifyForSubscriptions(Context context, EPerson ePerson, } private String generateBodyMail(Context context, List indexableObjects) { + if (indexableObjects == null || indexableObjects.isEmpty()) { + return EMPTY; + } try { ByteArrayOutputStream out = new ByteArrayOutputStream(); out.write("\n".getBytes(UTF_8)); - if (indexableObjects.size() > 0) { - for (IndexableObject indexableObject : indexableObjects) { - out.write("\n".getBytes(UTF_8)); - Item item = (Item) indexableObject.getIndexedObject(); - String entityType = itemService.getEntityTypeLabel(item); - Optional.ofNullable(entityType2Disseminator.get(entityType)) - .orElseGet(() -> entityType2Disseminator.get("Item")) - .disseminate(context, item, out); - } - return out.toString(); - } else { - out.write("No 
items".getBytes(UTF_8)); + for (IndexableObject indexableObject : indexableObjects) { + out.write("\n".getBytes(UTF_8)); + Item item = (Item) indexableObject.getIndexedObject(); + String entityType = itemService.getEntityTypeLabel(item); + Optional.ofNullable(entityType2Disseminator.get(entityType)) + .orElseGet(() -> entityType2Disseminator.get("Item")) + .disseminate(context, item, out); } return out.toString(); } catch (Exception e) { diff --git a/dspace/config/emails/subscriptions_content b/dspace/config/emails/subscriptions_content index a330c5953716..9b8c91e559df 100644 --- a/dspace/config/emails/subscriptions_content +++ b/dspace/config/emails/subscriptions_content @@ -2,15 +2,17 @@ ## ## Parameters: {0} Collections updates ## {1} Communities updates -#set($subject = "${config.get('dspace.name')} Subscription") - +#set($subject = "${config.get('dspace.name')} Subscriptions") This email is sent from ${config.get('dspace.name')} based on the chosen subscription preferences. -Communities ------------ +#if( not( "$params[0]" == "" )) +Community Subscriptions: +------------------------ List of changed items : ${params[0]} -Collections ------------ +#end +#if( not( "$params[1]" == "" )) +Collection Subscriptions: +------------------------- List of changed items : ${params[1]} - +#end From abc30a94b1832953656112401b5d7be10267857c Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 15 Nov 2023 10:39:08 +0100 Subject: [PATCH 575/686] [DSC-1053] Adds configurable thread pool size for async event schedule --- .../rest/repository/ViewEventRestRepository.java | 2 +- .../java/org/dspace/services/EventService.java | 7 ++++++- .../services/events/SystemEventService.java | 15 ++++++++++++++- dspace/config/dspace.cfg | 11 +++++++++++ 4 files changed, 32 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java index 1ed67526b14f..8755f10813ed 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ViewEventRestRepository.java @@ -65,7 +65,7 @@ public ViewEventRest createViewEvent() throws AuthorizeException, SQLException { "The given targetId does not resolve to a DSpaceObject: " + targetId); } final String referrer = viewEventRest.getReferrer(); - eventService.handleObjectEvent( + eventService.fireAsyncEvent( () -> UsageEvent.createUsageEvent(context, req, dSpaceObjectService, targetId, referrer) ); return viewEventRest; diff --git a/dspace-services/src/main/java/org/dspace/services/EventService.java b/dspace-services/src/main/java/org/dspace/services/EventService.java index 46f21c640b95..6cbc195656f0 100644 --- a/dspace-services/src/main/java/org/dspace/services/EventService.java +++ b/dspace-services/src/main/java/org/dspace/services/EventService.java @@ -36,6 +36,11 @@ public interface EventService { */ public void registerEventListener(EventListener listener); - void handleObjectEvent(Supplier eventSupplier); + /** + * Fires an event asynchronously by retrieving it from the given supplier + * + * @param eventSupplier + */ + void fireAsyncEvent(Supplier eventSupplier); } diff --git a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java index eafc55bafe76..84712927e212 100644 --- 
a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java +++ b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java @@ -16,8 +16,10 @@ import javax.annotation.PreDestroy; import org.apache.commons.lang3.ArrayUtils; +import org.dspace.services.ConfigurationService; import org.dspace.services.EventService; import org.dspace.services.RequestService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.model.Event; import org.dspace.services.model.Event.Scope; import org.dspace.services.model.EventListener; @@ -35,6 +37,16 @@ */ public final class SystemEventService implements EventService { + private static final int DEFAULT_THREAD_SIZE = 2; + private static final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + + private static final int threadSize; + + static { + threadSize = configurationService.getIntProperty("system-event.thread.size", DEFAULT_THREAD_SIZE); + } + private final Logger log = LoggerFactory.getLogger(SystemEventService.class); /** @@ -88,7 +100,8 @@ public void fireEvent(Event event) { } } - public void handleObjectEvent(Supplier eventSupplier) { + @Override + public void fireAsyncEvent(Supplier eventSupplier) { this.executorService.submit(() -> this.fireEvent(eventSupplier.get())); } diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 35a47ac8f996..1f1e97b16d47 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1933,6 +1933,17 @@ bulk-export.limit.loggedIn = -1 # anonymous users bulk-export.limit.notLoggedIn = 0 +#------------------------------------------------------------------# +#-------------SYSTEM ASYNC EVENT CONFIGURATIONS--------------------# +#------------------------------------------------------------------# +# # +# Configurations for the SystemEventService # +# # +#------------------------------------------------------------------# +# By default the asynchronous executor that dispatches events +# has 2 threads on which schedule events +system-event.thread.size = 2 + # Load default module configs # ---------------------------- # To exclude a module configuration, simply comment out its "include" statement. 
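Note on the async event patches above: fireAsyncEvent(Supplier<Event>) submits work to a fixed thread pool that is created lazily on first use and sized from the new system-event.thread.size property (default 2), and the event supplier is evaluated on the worker thread rather than on the caller's thread ("send and forget"). The sketch below reproduces only that concurrency pattern in isolation, with plain JDK types; AsyncEventDispatcher, the String "event", and the Supplier<Integer> configuration hook are hypothetical stand-ins for SystemEventService, org.dspace.services.model.Event, and the ConfigurationService lookup.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Supplier;

public class AsyncEventDispatcher {

    private static final int DEFAULT_THREAD_SIZE = 2;

    private final Supplier<Integer> configuredPoolSize;
    private ExecutorService executorService;

    public AsyncEventDispatcher(Supplier<Integer> configuredPoolSize) {
        // e.g. () -> configurationService.getIntProperty("system-event.thread.size", 2)
        this.configuredPoolSize = configuredPoolSize;
    }

    // Mirrors fireAsyncEvent(Supplier<Event>): the supplier runs on a worker
    // thread, so the HTTP request thread returns immediately.
    public synchronized void fireAsyncEvent(Supplier<String> eventSupplier) {
        initExecutor();
        executorService.submit(() -> fireEvent(eventSupplier.get()));
    }

    private void fireEvent(String event) {
        System.out.println("dispatched: " + event);
    }

    // Pool creation is deferred until the first event is fired.
    private void initExecutor() {
        if (executorService != null) {
            return;
        }
        Integer size = configuredPoolSize.get();
        executorService = Executors.newFixedThreadPool(
            size != null && size > 0 ? size : DEFAULT_THREAD_SIZE);
    }

    public void shutdown() {
        if (executorService != null) {
            executorService.shutdown(); // queued tasks still complete
        }
    }

    public static void main(String[] args) {
        AsyncEventDispatcher dispatcher = new AsyncEventDispatcher(() -> 2);
        dispatcher.fireAsyncEvent(() -> "view of item 123");
        dispatcher.shutdown();
    }
}

Deferring the pool creation (patch 577) presumably keeps configuration from being read while the service bean itself is constructed; the earlier revision built the pool in a static initializer, which forced the ConfigurationService lookup at class-load time.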
From 78906c17294025783094c8727f0a85231b471abc Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Wed, 15 Nov 2023 11:24:18 +0100 Subject: [PATCH 576/686] [DSC-1351] remove unused code --- .../main/java/org/dspace/core/AbstractHibernateDAO.java | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java index 8db06988f3ae..7f3e9be7c2cb 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java @@ -34,14 +34,8 @@ */ public abstract class AbstractHibernateDAO implements GenericDAO { - private Class entityTypeClass; - protected AbstractHibernateDAO() { - Type type = getClass().getGenericSuperclass(); - if (type instanceof ParameterizedType) { - ParameterizedType paramType = (ParameterizedType) type; - entityTypeClass = (Class) paramType.getActualTypeArguments()[0]; - } + } @Override From 9b4623ad19ace86156448a5f352f4a9a2e3835a6 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 15 Nov 2023 11:47:19 +0100 Subject: [PATCH 577/686] [DSC-1053] Defers thread pool creation --- .../services/events/SystemEventService.java | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java index 84712927e212..efcd31cb570e 100644 --- a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java +++ b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java @@ -38,14 +38,6 @@ public final class SystemEventService implements EventService { private static final int DEFAULT_THREAD_SIZE = 2; - private static final ConfigurationService configurationService = - DSpaceServicesFactory.getInstance().getConfigurationService(); - - private static final int threadSize; - - static { - threadSize = configurationService.getIntProperty("system-event.thread.size", DEFAULT_THREAD_SIZE); - } private final Logger log = LoggerFactory.getLogger(SystemEventService.class); @@ -57,7 +49,7 @@ public final class SystemEventService implements EventService { private final RequestService requestService; private EventRequestInterceptor requestInterceptor; - private final ExecutorService executorService = Executors.newFixedThreadPool(2); + private ExecutorService executorService; @Autowired(required = true) public SystemEventService(RequestService requestService) { @@ -102,9 +94,19 @@ public void fireEvent(Event event) { @Override public void fireAsyncEvent(Supplier eventSupplier) { + initExecutor(); this.executorService.submit(() -> this.fireEvent(eventSupplier.get())); } + private void initExecutor() { + if (this.executorService != null) { + return; + } + ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + int threadSize = configurationService.getIntProperty("system-event.thread.size", DEFAULT_THREAD_SIZE); + this.executorService = Executors.newFixedThreadPool(threadSize); + } + /* (non-Javadoc) * @see org.dspace.services.EventService#registerEventListener(org.dspace.services.model.EventListener) */ From c1178ff504b3fd73f81ca002345ef69b8d82894d Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Wed, 15 Nov 2023 12:01:12 +0100 Subject: [PATCH 578/686] [DSC-1351] checkstyle fix --- 
.../src/main/java/org/dspace/core/AbstractHibernateDAO.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java index 7f3e9be7c2cb..40825e77da2e 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java @@ -7,8 +7,6 @@ */ package org.dspace.core; -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; import java.sql.SQLException; import java.util.Iterator; import java.util.List; From 62739fa0ec4ab816bd6f4574a925875bf8d1d906 Mon Sep 17 00:00:00 2001 From: Stefano Maffei Date: Wed, 15 Nov 2023 16:21:31 +0100 Subject: [PATCH 579/686] [DSC-1375] filter-media bug fixing & improvement --- .../MediaFilterScriptConfiguration.java | 3 ++- .../app/mediafilter/MediaFilterServiceImpl.java | 16 +++++++++++----- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java index 867e684db86b..7465fa6e1279 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java @@ -33,7 +33,8 @@ public Options getOptions() { options.addOption("v", "verbose", false, "print all extracted text and other details to STDOUT"); options.addOption("q", "quiet", false, "do not print anything except in the event of errors."); options.addOption("f", "force", false, "force all bitstreams to be processed"); - options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier"); + options.addOption("i", "identifier", true, + "ONLY process bitstreams belonging to the provided handle identifier"); options.addOption("m", "maximum", true, "process no more than maximum items"); options.addOption("h", "help", false, "help"); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java index e2c6c9c5db06..2085097beb1f 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java @@ -36,6 +36,7 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.SelfNamedPlugin; +import org.dspace.core.UUIDIterator; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -132,13 +133,17 @@ public void applyFiltersCommunity(Context context, Community community) throws Exception { //only apply filters if community not in skip-list if (!inSkipList(community.getHandle())) { List subcommunities = community.getSubcommunities(); - for (Community subcommunity : subcommunities) { - applyFiltersCommunity(context, subcommunity); + List collections = community.getCollections(); + + UUIDIterator communityIterator = new UUIDIterator<>(context, subcommunities, Community.class); + UUIDIterator collectionIterator = new UUIDIterator<>(context, collections, Collection.class); + + while (communityIterator.hasNext()) { + applyFiltersCommunity(context, communityIterator.next()); } - List collections = community.getCollections(); - for (Collection 
collection : collections) { - applyFiltersCollection(context, collection); + while (collectionIterator.hasNext()) { + applyFiltersCollection(context, collectionIterator.next()); } } } @@ -169,6 +174,7 @@ public void applyFiltersItem(Context c, Item item) throws Exception { } // clear item objects from context cache and internal cache c.uncacheEntity(currentItem); + c.commit(); currentItem = null; } } From 2a0d10ac792fe661bb7f0d89a21f1643a0f580c5 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Wed, 15 Nov 2023 19:53:52 +0100 Subject: [PATCH 580/686] [DSC-1371] Fix after merge --- .../subscriptions/SubscriptionEmailNotificationServiceImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java index a834a6348edd..d11c7ab089ce 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java @@ -151,7 +151,7 @@ private void performForContent(Context context, DSpaceRunnableHandler handler, ); communityItems.clear(); collectionsItems.clear(); - items.clear(); + entityItemsByEntityType.clear(); } } else { //in the end of the iteration From e8b86fd5fd85bee8e7b73a4df8fee75b40cbc208 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 16 Nov 2023 12:56:04 +0100 Subject: [PATCH 581/686] [DSC-1205] Fixes title for orcid authority choices --- .../content/authority/OrcidAuthority.java | 12 +- .../org/dspace/app/rest/OrcidAuthorityIT.java | 153 +++++++++++++++--- 2 files changed, 136 insertions(+), 29 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/authority/OrcidAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/OrcidAuthority.java index 8978c1f90fcc..02ca7be701d2 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/OrcidAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/OrcidAuthority.java @@ -43,6 +43,8 @@ public class OrcidAuthority extends ItemAuthority { private static final Logger LOGGER = LoggerFactory.getLogger(OrcidAuthority.class); + private static final String IS_LATIN_REGEX = "\\p{IsLatin}+"; + public static final String DEFAULT_ORCID_KEY = "person_identifier_orcid"; public static final String DEFAULT_INSTITUTION_KEY = "institution-affiliation-name"; @@ -118,9 +120,13 @@ private String getTitle(ExpandedResult result) { String givenName = result.getGivenNames(); String familyName = result.getFamilyNames(); - String title = isNotBlank(givenName) ? capitalizeFully(givenName) : ""; - title += isNotBlank(familyName) ? " " + capitalizeFully(familyName) : ""; - + String capitalizedFamilyName = capitalizeFully(familyName); + String capitalizedGivenName = capitalizeFully(givenName); + String title = capitalizedFamilyName + ", " + capitalizedGivenName; + if (!givenName.matches(IS_LATIN_REGEX) || !familyName.matches(IS_LATIN_REGEX)) { + title = isNotBlank(familyName) ? capitalizeFully(familyName) : ""; + title += isNotBlank(givenName) ? 
" " + capitalizeFully(givenName) : ""; + } return title.trim(); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java index 33bb24f8b029..62cc6b11c32a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/OrcidAuthorityIT.java @@ -141,9 +141,9 @@ public void testWithWillBeGeneratedAuthorityPrefix() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", GENERATE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", GENERATE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", GENERATE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", GENERATE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -188,9 +188,9 @@ public void testWithWillBeReferencedAuthorityPrefix() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -237,9 +237,9 @@ public void testWithPagination() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(10))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -285,7 +285,7 @@ public void testWithPagination() throws Exception { affiliationEntry(author_2, "Author 2", ""), affiliationEntry(author_3, "Author 3", "OrgUnit_2::" + id(orgUnit_2)), affiliationEntry(author_4, "Author 4", "OrgUnit_1::" + id(orgUnit_1)), - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333")))) .andExpect(jsonPath("$.page.size", Matchers.is(5))) 
.andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -298,8 +298,8 @@ public void testWithPagination() throws Exception { .param("size", "5")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(5))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -312,7 +312,7 @@ public void testWithPagination() throws Exception { .param("size", "6")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(6))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(2))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(7))); @@ -460,8 +460,8 @@ public void testWithoutClientIdConfiguration() throws Exception { .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( affiliationEntry(author_1, "Author 1", ""), affiliationEntry(author_2, "Author 2", ""), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444")))) + orcidEntry("From Orcid 1 Author", GENERATE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", GENERATE, "0000-2222-3333-4444")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(4))); @@ -500,8 +500,8 @@ public void testWithoutClientSecretConfiguration() throws Exception { .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( affiliationEntry(author_1, "Author 1", ""), affiliationEntry(author_2, "Author 2", ""), - orcidEntry("Author From Orcid 1", GENERATE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", GENERATE, "0000-2222-3333-4444")))) + orcidEntry("From Orcid 1 Author", GENERATE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", GENERATE, "0000-2222-3333-4444")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(4))); @@ -533,9 +533,9 @@ public void testWithComposedName() throws Exception { .param("filter", "John Bruce Wayne")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); @@ -568,9 +568,9 @@ public void testWithLastNameAndFirstNameSeparatedByComma() throws 
Exception { .param("filter", "Wayne, Bruce")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntry("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444"), - orcidEntry("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444"), + orcidEntry("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); @@ -581,6 +581,107 @@ public void testWithLastNameAndFirstNameSeparatedByComma() throws Exception { } + + @Test + public void testWithLatinValueLastNameAndFirstNameSeparatedByComma() throws Exception { + + String expectedQuery = "(given-names:Wayne+OR+family-name:Wayne+OR+other-names:Wayne)" + + "+AND+(given-names:Bruce+OR+family-name:Bruce+OR+other-names:Bruce)"; + + when(orcidClientMock.expandedSearch(eq(READ_PUBLIC_TOKEN), eq(expectedQuery), anyInt(), anyInt())) + .thenReturn(expandedSearch(0l, List.of())); + + List orcidSearchResults = List.of(expandedResult("Vincenzo", "Mecca", "0000-1111-2222-3333")); + + when(orcidClientMock.expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20)) + .thenReturn(expandedSearch(1, orcidSearchResults)); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/AuthorAuthority/entries") + .param("filter", "Wayne, Bruce")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + orcidEntry("Mecca, Vincenzo", REFERENCE, "0000-1111-2222-3333")))) + .andExpect(jsonPath("$.page.size", Matchers.is(20))) + .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20); + verifyNoMoreInteractions(orcidClientMock); + + } + + @Test + public void testWithNonLatinValueLastNameAndFirstNameSeparatedByComma() throws Exception { + + String expectedQuery = "(given-names:Wayne+OR+family-name:Wayne+OR+other-names:Wayne)" + + "+AND+(given-names:Bruce+OR+family-name:Bruce+OR+other-names:Bruce)"; + + when(orcidClientMock.expandedSearch(eq(READ_PUBLIC_TOKEN), eq(expectedQuery), anyInt(), anyInt())) + .thenReturn(expandedSearch(0l, List.of())); + + List orcidSearchResults = List.of( + expandedResult("Vins", "@4Science", "0000-1111-2222-3333"), + expandedResult("V1n5", "M3cc4", "0000-4444-5555-6666") + ); + + when(orcidClientMock.expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20)) + .thenReturn(expandedSearch(2, orcidSearchResults)); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/AuthorAuthority/entries") + .param("filter", "Wayne, Bruce")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + orcidEntry("@4science Vins", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("M3cc4 V1n5", REFERENCE, "0000-4444-5555-6666") + ))) + .andExpect(jsonPath("$.page.size", Matchers.is(20))) + .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(2))); + + 
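
The renamed expectations ("From Orcid 1 Author" instead of "Author From Orcid 1") and the new Latin/non-Latin tests all exercise one display-name rule. The implementation is outside this hunk, so the Java sketch below is only an illustration of the rule the expected values encode; the Latin-script check, the class name and the capitalizeFully stand-in are assumptions, not DSpace code. Fed the fixtures used in these tests, it produces the same strings the assertions expect.

    import java.util.regex.Pattern;

    public class OrcidDisplayNameSketch {

        // Assumption: "Latin" means Latin-script letters plus spaces, hyphens and
        // apostrophes; digits or symbols (as in "M3cc4" or "@4Science") fail the test.
        private static final Pattern LATIN_NAME = Pattern.compile("[\\p{IsLatin} \\-']+");

        static String displayName(String familyName, String givenName) {
            if (LATIN_NAME.matcher(familyName).matches() && LATIN_NAME.matcher(givenName).matches()) {
                // Latin-script names are rendered as "Family, Given"
                return familyName + ", " + givenName;
            }
            // Everything else keeps "family given" order, with each part capitalised
            return capitalizeFully(familyName) + " " + capitalizeFully(givenName);
        }

        // Minimal stand-in for Apache Commons' WordUtils.capitalizeFully
        static String capitalizeFully(String value) {
            StringBuilder out = new StringBuilder(value.length());
            boolean startOfWord = true;
            for (char c : value.toLowerCase().toCharArray()) {
                out.append(startOfWord ? Character.toTitleCase(c) : c);
                startOfWord = Character.isWhitespace(c);
            }
            return out.toString();
        }

        public static void main(String[] args) {
            System.out.println(displayName("Mecca", "Vincenzo"));      // Mecca, Vincenzo
            System.out.println(displayName("@4Science", "Vins"));      // @4science Vins
            System.out.println(displayName("M3cc4", "V1n5"));          // M3cc4 V1n5
            System.out.println(displayName("From Orcid 1", "Author")); // From Orcid 1 Author
        }
    }
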
verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testWithMultipleValueLastNameAndFirstNameSeparatedByComma() throws Exception { + + String expectedQuery = "(given-names:Wayne+OR+family-name:Wayne+OR+other-names:Wayne)" + + "+AND+(given-names:Bruce+OR+family-name:Bruce+OR+other-names:Bruce)"; + + when(orcidClientMock.expandedSearch(eq(READ_PUBLIC_TOKEN), eq(expectedQuery), anyInt(), anyInt())) + .thenReturn(expandedSearch(0l, List.of())); + + List orcidSearchResults = List.of( + expandedResult("Vincenzo", "Mecca", "0000-1111-2222-3333"), + expandedResult("Vins", "@4Science", "0000-4444-5555-6666"), + expandedResult("V1n5", "M3cc4", "0000-7777-8888-9999") + ); + + when(orcidClientMock.expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20)) + .thenReturn(expandedSearch(3, orcidSearchResults)); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/AuthorAuthority/entries") + .param("filter", "Wayne, Bruce")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + orcidEntry("Mecca, Vincenzo", REFERENCE, "0000-1111-2222-3333"), + orcidEntry("@4science Vins", REFERENCE, "0000-4444-5555-6666"), + orcidEntry("M3cc4 V1n5", REFERENCE, "0000-7777-8888-9999") + ))) + .andExpect(jsonPath("$.page.size", Matchers.is(20))) + .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).expandedSearch(READ_PUBLIC_TOKEN, expectedQuery, 0, 20); + verifyNoMoreInteractions(orcidClientMock); + } + @Test public void testWithAffiliationExtra() throws Exception { @@ -599,9 +700,9 @@ public void testWithAffiliationExtra() throws Exception { .param("filter", "author")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( - orcidEntry("Author From Orcid 1", REFERENCE, "0000-1111-2222-3333"), - orcidEntryWithAffiliation("Author From Orcid 2", REFERENCE, "0000-2222-3333-4444", "Org1, Org2"), - orcidEntryWithAffiliation("Author From Orcid 3", REFERENCE, "0000-5555-6666-7777", "Organization")))) + orcidEntry("From Orcid 1 Author", REFERENCE, "0000-1111-2222-3333"), + orcidEntryWithAffiliation("From Orcid 2 Author", REFERENCE, "0000-2222-3333-4444", "Org1, Org2"), + orcidEntryWithAffiliation("From Orcid 3 Author", REFERENCE, "0000-5555-6666-7777", "Organization")))) .andExpect(jsonPath("$.page.size", Matchers.is(20))) .andExpect(jsonPath("$.page.totalPages", Matchers.is(1))) .andExpect(jsonPath("$.page.totalElements", Matchers.is(3))); From b4c3fa0a79bcaeba313ff14667d7fdf20c707ae6 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Thu, 16 Nov 2023 12:56:36 +0100 Subject: [PATCH 582/686] [DSC-139] Added support for ROR identifiers out of the box --- .../crosswalks/template/orgUnit-cerif-xml.template | 1 + .../crosswalks/template/patent-datacite-xml.template | 9 +++++++++ .../crosswalks/template/product-datacite-xml.template | 9 +++++++++ .../template/publication-datacite-xml.template | 9 +++++++++ dspace/config/modules/orcid.cfg | 1 + 5 files changed, 29 insertions(+) diff --git a/dspace/config/crosswalks/template/orgUnit-cerif-xml.template b/dspace/config/crosswalks/template/orgUnit-cerif-xml.template index 484d2daaa6c4..69d94a27a320 100644 --- 
a/dspace/config/crosswalks/template/orgUnit-cerif-xml.template +++ b/dspace/config/crosswalks/template/orgUnit-cerif-xml.template @@ -5,6 +5,7 @@ @organization.legalName@ @organization.identifier@ @oairecerif.identifier.url@ + @organization.identifier.ror@ @relation.organization-parentOrganization.start@ diff --git a/dspace/config/crosswalks/template/patent-datacite-xml.template b/dspace/config/crosswalks/template/patent-datacite-xml.template index 9f31693eb4e2..a08d36f7a4fb 100644 --- a/dspace/config/crosswalks/template/patent-datacite-xml.template +++ b/dspace/config/crosswalks/template/patent-datacite-xml.template @@ -8,7 +8,16 @@ @relation.dc-contributor-author.start@ @person.identifier.orcid@ @relation.dc-contributor-author.end@ + @relation.oairecerif-author-affiliation.start@ + @dc.title@ + @relation.oairecerif-author-affiliation.end@ + @if.not.authority.oairecerif-author-affiliation.start@ @oairecerif.author.affiliation@ + @if.not.authority.oairecerif-author-affiliation.end@ @group.dc-contributor-author.end@ diff --git a/dspace/config/crosswalks/template/product-datacite-xml.template b/dspace/config/crosswalks/template/product-datacite-xml.template index 414527ee3378..7409088142de 100644 --- a/dspace/config/crosswalks/template/product-datacite-xml.template +++ b/dspace/config/crosswalks/template/product-datacite-xml.template @@ -8,7 +8,16 @@ @relation.dc-contributor-author.start@ @person.identifier.orcid@ @relation.dc-contributor-author.end@ + @relation.oairecerif-author-affiliation.start@ + @dc.title@ + @relation.oairecerif-author-affiliation.end@ + @if.not.authority.oairecerif-author-affiliation.start@ @oairecerif.author.affiliation@ + @if.not.authority.oairecerif-author-affiliation.end@ @group.dc-contributor-author.end@ diff --git a/dspace/config/crosswalks/template/publication-datacite-xml.template b/dspace/config/crosswalks/template/publication-datacite-xml.template index 9f31693eb4e2..a08d36f7a4fb 100644 --- a/dspace/config/crosswalks/template/publication-datacite-xml.template +++ b/dspace/config/crosswalks/template/publication-datacite-xml.template @@ -8,7 +8,16 @@ @relation.dc-contributor-author.start@ @person.identifier.orcid@ @relation.dc-contributor-author.end@ + @relation.oairecerif-author-affiliation.start@ + @dc.title@ + @relation.oairecerif-author-affiliation.end@ + @if.not.authority.oairecerif-author-affiliation.start@ @oairecerif.author.affiliation@ + @if.not.authority.oairecerif-author-affiliation.end@ @group.dc-contributor-author.end@ diff --git a/dspace/config/modules/orcid.cfg b/dspace/config/modules/orcid.cfg index bc4d8cfdd90e..4bce1fe38f96 100644 --- a/dspace/config/modules/orcid.cfg +++ b/dspace/config/modules/orcid.cfg @@ -152,6 +152,7 @@ orcid.mapping.organization.city = organization.address.addressLocality orcid.mapping.organization.identifiers = organization.identifier.crossrefid::FUNDREF orcid.mapping.organization.identifiers = organization.identifier.rin::RINGGOLD orcid.mapping.organization.identifiers = organization.identifier.lei::LEI +orcid.mapping.organization.identifiers = organization.identifier.ror::ROR ### Contributor mapping ### orcid.mapping.contributor.email = person.email From 5fd4e5f105017b0d16a67851936abc5a83396f1d Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Thu, 16 Nov 2023 19:15:29 +0100 Subject: [PATCH 583/686] [DSC-1369] Improve security level check by add a new maxAllowedMetadataVisibility property --- .../AbstractOrcidProfileSectionFactory.java | 15 +++++++++++++++ .../factory/impl/OrcidAffiliationFactory.java | 18 
+----------------- .../impl/OrcidSimpleValueObjectFactory.java | 18 +----------------- dspace/config/spring/api/orcid-services.xml | 8 -------- 4 files changed, 17 insertions(+), 42 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java index 2c272e620cca..98f63193a964 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java @@ -34,6 +34,8 @@ public abstract class AbstractOrcidProfileSectionFactory implements OrcidProfile protected final OrcidProfileSyncPreference preference; + protected Integer maxAllowedMetadataVisibility = 0; + @Autowired protected ItemService itemService; @@ -70,4 +72,17 @@ protected List getMetadataValues(Item item, String metadataField) return itemService.getMetadataByMetadataString(item, metadataField); } + protected boolean isAllowedMetadataByVisibility(MetadataValue metadataValue) { + return metadataValue.getSecurityLevel() == null + || metadataValue.getSecurityLevel() <= getMaxAllowedMetadataVisibility(); + } + + public Integer getMaxAllowedMetadataVisibility() { + return maxAllowedMetadataVisibility; + } + + public void setMaxAllowedMetadataVisibility(Integer maxAllowedMetadataVisibility) { + this.maxAllowedMetadataVisibility = maxAllowedMetadataVisibility; + } + } diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java index 26623a7ce820..d74f05bcaf50 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java @@ -50,8 +50,6 @@ public class OrcidAffiliationFactory extends AbstractOrcidProfileSectionFactory private String endDateField; - private boolean isAllowedMetadataVisibility = false; - public OrcidAffiliationFactory(OrcidProfileSectionType sectionType, OrcidProfileSyncPreference preference) { super(sectionType, preference); } @@ -97,7 +95,7 @@ public List getMetadataSignatures(Context context, Item item) { List metadataValues = getMetadataValueByPlace(metadataGroups, currentGroupIndex); //only "visible" metadatavalues within this group metadataValues = metadataValues.stream() - .filter(metadataValue -> getAllowedMetadataVisibility(metadataValue)) + .filter(metadataValue -> isAllowedMetadataByVisibility(metadataValue)) .collect(Collectors.toList()); if (!metadataValues.isEmpty()) { signatures.add(metadataSignatureGenerator.generate(context, metadataValues)); @@ -160,13 +158,6 @@ private boolean isUnprocessableValue(MetadataValue value) { return value == null || isBlank(value.getValue()) || value.getValue().equals(PLACEHOLDER_PARENT_METADATA_VALUE); } - private boolean getAllowedMetadataVisibility(MetadataValue metadataValue) { - if (isAllowedMetadataVisibility()) { - return metadataValue.getSecurityLevel() == null || metadataValue.getSecurityLevel() == 0; - } - return true; - } - private Map> getMetadataGroups(Item item) { Map> metadataGroups = new HashMap<>(); metadataGroups.put(organizationField, itemService.getMetadataByMetadataString(item, organizationField)); @@ -219,11 +210,4 @@ public void setEndDateField(String endDateField) { this.endDateField 
= endDateField; } - public boolean isAllowedMetadataVisibility() { - return isAllowedMetadataVisibility; - } - - public void setAllowedMetadataVisibility(boolean allowedMetadataVisibility) { - isAllowedMetadataVisibility = allowedMetadataVisibility; - } } diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java index 1f55c2d62e99..28113e958332 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java @@ -42,8 +42,6 @@ public class OrcidSimpleValueObjectFactory extends AbstractOrcidProfileSectionFa private List metadataFields = new ArrayList(); - private boolean isAllowedMetadataVisibility = false; - public OrcidSimpleValueObjectFactory(OrcidProfileSectionType sectionType, OrcidProfileSyncPreference preference) { super(sectionType, preference); } @@ -78,7 +76,7 @@ public Object create(Context context, List metadataValues) { public List getMetadataSignatures(Context context, Item item) { return metadataFields.stream() .flatMap(metadataField -> getMetadataValues(item, metadataField).stream()) - .filter(metadataValue -> getAllowedMetadataVisibility(metadataValue)) + .filter(metadataValue -> isAllowedMetadataByVisibility(metadataValue)) .map(metadataValue -> metadataSignatureGenerator.generate(context, List.of(metadataValue))) .collect(Collectors.toList()); } @@ -140,13 +138,6 @@ private Address createAddress(Country country) { return address; } - private boolean getAllowedMetadataVisibility(MetadataValue metadataValue) { - if (isAllowedMetadataVisibility()) { - return metadataValue.getSecurityLevel() == null || metadataValue.getSecurityLevel() == 0; - } - return true; - } - public void setMetadataFields(String metadataFields) { this.metadataFields = metadataFields != null ? 
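
The change in this patch replaces the boolean isAllowedMetadataVisibility flag on the ORCID profile section factories with an integer threshold, maxAllowedMetadataVisibility. Reduced to a single predicate, the rule the diff adds looks like this (a sketch of the logic only, not a copy of the DSpace class):

    // A metadata value is pushed to ORCID when it carries no security level at all,
    // or when its level does not exceed the configured threshold; the field defaults
    // to 0 in AbstractOrcidProfileSectionFactory.
    static boolean isAllowedByVisibility(Integer securityLevel, int maxAllowedMetadataVisibility) {
        return securityLevel == null || securityLevel <= maxAllowedMetadataVisibility;
    }

Before this change a factory either ignored security levels entirely (flag off, the default) or accepted only level 0 (flag on); the new property turns that into a configurable maximum per factory bean.
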
asList(metadataFields.split(",")) : emptyList(); } @@ -156,11 +147,4 @@ public List getMetadataFields() { return metadataFields; } - public boolean isAllowedMetadataVisibility() { - return isAllowedMetadataVisibility; - } - - public void setAllowedMetadataVisibility(boolean allowedMetadataVisibility) { - isAllowedMetadataVisibility = allowedMetadataVisibility; - } } diff --git a/dspace/config/spring/api/orcid-services.xml b/dspace/config/spring/api/orcid-services.xml index d9c9c6d5f249..eb4e20a459c2 100644 --- a/dspace/config/spring/api/orcid-services.xml +++ b/dspace/config/spring/api/orcid-services.xml @@ -112,7 +112,6 @@ - @@ -122,7 +121,6 @@ - @@ -132,42 +130,36 @@ - - - - - - From 32c3198fa0e3f5d5d7631553d7c23442276a1084 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Fri, 17 Nov 2023 17:01:50 +0100 Subject: [PATCH 584/686] [DSC-1361] Fixes dspace-api failing ITs --- .../bulkaccesscontrol/BulkAccessControl.java | 2 +- .../org/dspace/app/util/DCInputsReader.java | 2 +- .../authority/ChoiceAuthorityServiceImpl.java | 2 +- .../dspaceFolder/config/submission-forms.xml | 48 +++++++++++++++++++ .../crosswalks/ReferCrosswalkIT.java | 9 ++-- .../SubmissionDefinitionRestRepository.java | 2 +- .../WorkspaceItemRestRepository.java | 2 +- 7 files changed, 57 insertions(+), 10 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index 50e1022dbe37..7bef232f0450 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -464,7 +464,7 @@ private void setItemPolicies(Item item, BulkAccessControlInput accessControl) .forEach(accessCondition -> createResourcePolicy(item, accessCondition, itemAccessConditions.get(accessCondition.getName()))); - itemService.adjustItemPolicies(context, item, item.getOwningCollection()); + itemService.adjustItemPolicies(context, item, item.getOwningCollection(), false); } /** diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java index 4b2df5ac9784..936d5d9c62dd 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java @@ -169,7 +169,7 @@ public List getInputsByCollection(Collection collection) SubmissionConfig config; try { config = SubmissionServiceFactory.getInstance().getSubmissionConfigService() - .getSubmissionConfigByCollection(collection.getHandle()); + .getSubmissionConfigByCollection(collection); String formName = config.getSubmissionName(); if (formName == null) { throw new DCInputsReaderException("No form designated as default"); diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index fce01718f4ab..0c545d1592e2 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -574,7 +574,7 @@ public ChoiceAuthority getAuthorityByFieldKeyCollection(String fieldKey, int dso try { configReaderService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); SubmissionConfig submissionName = configReaderService - 
.getSubmissionConfigByCollection(collection.getHandle()); + .getSubmissionConfigByCollection(collection); if (submissionName == null) { log.warn("No submission name was found for object type " + dsoType + " in collection " + collection.getHandle()); diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 03431193d3a6..2aa9ac516720 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -1528,6 +1528,54 @@ You can leave out the day and/or month if they aren't applicable. +
    + + + oairecerif + affiliation + role + false + + + onebox + + + + + oairecerif + person + affiliation + false + + + onebox + + You must enter at least the organisation of your affiliation. + + + oairecerif + affiliation + startDate + false + + + date + + + + + oairecerif + affiliation + endDate + false + + + date + + + + +
    diff --git a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java index 770e3c83a2c7..5cb9e9dc6b8f 100644 --- a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java +++ b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ReferCrosswalkIT.java @@ -2713,7 +2713,6 @@ public void testPublicationVirtualFieldValuePairList() throws Exception { .build(); Item publicationItem = createItem(context, publicationCollection) - .withEntityType("Publication") .withTitle("Publication title") .withType("not translated", vocabularyName + ":c_7bab") .withLanguage("en_US") @@ -2730,11 +2729,11 @@ public void testPublicationVirtualFieldValuePairList() throws Exception { referCrosswalk.disseminate(context, publicationItem, out); String[] resultLines = out.toString().split("\n"); - assertThat(resultLines.length, is(7)); + assertThat(resultLines.length, is(6)); assertThat(resultLines[0].trim(), equalTo("")); - assertThat(resultLines[4].trim(), equalTo("software paper")); - assertThat(resultLines[5].trim(), equalTo("English (United States)")); - assertThat(resultLines[6].trim(), equalTo("")); + assertThat(resultLines[3].trim(), equalTo("software paper")); + assertThat(resultLines[4].trim(), equalTo("English (United States)")); + assertThat(resultLines[5].trim(), equalTo("")); } @Test diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionDefinitionRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionDefinitionRestRepository.java index 69380b225131..b9f69c25e0f3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionDefinitionRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionDefinitionRestRepository.java @@ -77,7 +77,7 @@ public SubmissionDefinitionRest findByCollection(@Parameter(value = "uuid", requ return null; } SubmissionDefinitionRest def = converter - .toRest(submissionConfigService.getSubmissionConfigByCollection(col.getHandle()), + .toRest(submissionConfigService.getSubmissionConfigByCollection(col), utils.obtainProjection()); return def; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkspaceItemRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkspaceItemRestRepository.java index 5f348166572b..566ed14f333b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkspaceItemRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/WorkspaceItemRestRepository.java @@ -312,7 +312,7 @@ public Iterable upload(Context context, HttpServletRequest re } SubmissionConfig submissionConfig = - submissionConfigService.getSubmissionConfigByCollection(collection.getHandle()); + submissionConfigService.getSubmissionConfigByCollection(collection); List result = null; List records = new ArrayList<>(); try { From cc783bbe9faf22d5eca2db98b871f615686cdacb Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 20 Nov 2023 12:26:39 +0100 Subject: [PATCH 585/686] [DSC-1361] Fixes dspace-api failing ITs feat: - Improved EventService usage handling in ITs --- .../consumer/ItemEnhancerConsumer.java | 13 ++++++ .../test/data/dspaceFolder/config/local.cfg | 10 +++++ .../FileTypeMetadataEnhancerConsumerIT.java | 41 +++++++++++++++++++ 
.../enhancer/script/ItemEnhancerScriptIT.java | 22 ++++++---- .../app/rest/UpdateItemReferenceIT.java | 8 ++-- 5 files changed, 82 insertions(+), 12 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java index c6baab84048d..5a2ae2975ef8 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java @@ -15,6 +15,8 @@ import org.dspace.core.Context; import org.dspace.event.Consumer; import org.dspace.event.Event; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.utils.DSpace; /** @@ -26,10 +28,13 @@ */ public class ItemEnhancerConsumer implements Consumer { + public static final String ITEMENHANCER_ENABLED = "itemenhancer.enabled"; private Set itemsAlreadyProcessed = new HashSet(); private ItemEnhancerService itemEnhancerService; + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + @Override public void finish(Context ctx) throws Exception { @@ -43,6 +48,10 @@ public void initialize() throws Exception { @Override public void consume(Context context, Event event) throws Exception { + if (!isConsumerEnabled()) { + return; + } + Item item = (Item) event.getSubject(context); if (item == null || itemsAlreadyProcessed.contains(item) || !item.isArchived()) { return; @@ -59,6 +68,10 @@ public void consume(Context context, Event event) throws Exception { } + protected boolean isConsumerEnabled() { + return configurationService.getBooleanProperty(ITEMENHANCER_ENABLED, true); + } + @Override public void end(Context ctx) throws Exception { itemsAlreadyProcessed.clear(); diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index df96d9775c3c..8ff8df9944c4 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -214,3 +214,13 @@ logging.server.include-stacktrace-for-httpcode = 400, 401, 404, 403, 422 # Configuration required for thorough testing of browse links webui.browse.link.1 = author:dc.contributor.* webui.browse.link.2 = subject:dc.subject.* + +# Enable researcher profiles and orcid synchronization for tests +researcher-profile.entity-type = Person +orcid.synchronization-enabled = true + +# Configuration settings required for Researcher Profiles +# These settings ensure "dspace.object.owner" field are indexed by Authority Control +choices.plugin.dspace.object.owner = EPersonAuthority +choices.presentation.dspace.object.owner = suggest +authority.controlled.dspace.object.owner = true \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java b/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java index 200ae7b3fefa..9bdc3f752065 100644 --- a/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/app/filetype/consumer/FileTypeMetadataEnhancerConsumerIT.java @@ -16,7 +16,10 @@ import java.io.IOException; import java.sql.SQLException; import java.text.ParseException; +import java.util.Arrays; +import java.util.HashSet; import 
java.util.List; +import java.util.Set; import java.util.function.Predicate; import org.apache.commons.codec.binary.StringUtils; @@ -38,14 +41,25 @@ import org.dspace.content.service.BitstreamService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; +import org.dspace.event.factory.EventServiceFactory; +import org.dspace.event.service.EventService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; public class FileTypeMetadataEnhancerConsumerIT extends AbstractIntegrationTestWithDatabase { + private static final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + + private static final EventService eventService = EventServiceFactory.getInstance().getEventService(); + private Collection collection; private final BitstreamService bitstreamService = ContentServiceFactory.getInstance() @@ -53,6 +67,33 @@ public class FileTypeMetadataEnhancerConsumerIT extends AbstractIntegrationTestW private final ItemService itemService = ContentServiceFactory.getInstance() .getItemService(); + private static String[] consumers; + + /** + * This method will be run before the first test as per @BeforeClass. It will + * configure the event.dispatcher.default.consumers property to remove the + * FileTypeMetadataEnhancerConsumer. + */ + @BeforeClass + public static void initConsumers() { + consumers = configurationService.getArrayProperty("event.dispatcher.default.consumers"); + Set consumersSet = new HashSet(Arrays.asList(consumers)); + if (!consumersSet.contains("filetypemetadataenhancer")) { + consumersSet.add("filetypemetadataenhancer"); + configurationService.setProperty("event.dispatcher.default.consumers", consumersSet.toArray()); + eventService.reloadConfiguration(); + } + } + + /** + * Reset the event.dispatcher.default.consumers property value. 
+ */ + @AfterClass + public static void resetDefaultConsumers() { + configurationService.setProperty("event.dispatcher.default.consumers", consumers); + eventService.reloadConfiguration(); + } + @Before public void setup() { context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/content/enhancer/script/ItemEnhancerScriptIT.java b/dspace-api/src/test/java/org/dspace/content/enhancer/script/ItemEnhancerScriptIT.java index 1bf48c373177..33913368b0a1 100644 --- a/dspace-api/src/test/java/org/dspace/content/enhancer/script/ItemEnhancerScriptIT.java +++ b/dspace-api/src/test/java/org/dspace/content/enhancer/script/ItemEnhancerScriptIT.java @@ -9,6 +9,7 @@ import static org.dspace.app.matcher.MetadataValueMatcher.with; import static org.dspace.content.Item.ANY; +import static org.dspace.content.enhancer.consumer.ItemEnhancerConsumer.ITEMENHANCER_ENABLED; import static org.dspace.core.CrisConstants.PLACEHOLDER_PARENT_METADATA_VALUE; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; @@ -48,12 +49,16 @@ public class ItemEnhancerScriptIT extends AbstractIntegrationTestWithDatabase { - private static String[] consumers; + private static ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final EventService eventService = EventServiceFactory.getInstance().getEventService(); + private static boolean isEnabled; + private static String[] consumers; private ItemService itemService; private Collection collection; + /** * This method will be run before the first test as per @BeforeClass. It will * configure the event.dispatcher.default.consumers property to remove the @@ -61,13 +66,13 @@ public class ItemEnhancerScriptIT extends AbstractIntegrationTestWithDatabase { */ @BeforeClass public static void initConsumers() { - ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); consumers = configService.getArrayProperty("event.dispatcher.default.consumers"); Set consumersSet = new HashSet(Arrays.asList(consumers)); - consumersSet.remove("itemenhancer"); - configService.setProperty("event.dispatcher.default.consumers", consumersSet.toArray()); - EventService eventService = EventServiceFactory.getInstance().getEventService(); - eventService.reloadConfiguration(); + if (!consumersSet.contains("itemenhancer")) { + consumersSet.add("itemenhancer"); + configService.setProperty("event.dispatcher.default.consumers", consumersSet.toArray()); + eventService.reloadConfiguration(); + } } /** @@ -75,18 +80,19 @@ public static void initConsumers() { */ @AfterClass public static void resetDefaultConsumers() { - ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); configService.setProperty("event.dispatcher.default.consumers", consumers); - EventService eventService = EventServiceFactory.getInstance().getEventService(); eventService.reloadConfiguration(); } @Before public void setup() { + configService.setProperty(ITEMENHANCER_ENABLED, false); + itemService = ContentServiceFactory.getInstance().getItemService(); context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) .withName("Parent Community") .build(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateItemReferenceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateItemReferenceIT.java index cc51a7d413f0..bef44b4b714b 100644 --- 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateItemReferenceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateItemReferenceIT.java @@ -40,6 +40,10 @@ */ public class UpdateItemReferenceIT extends AbstractControllerIntegrationTest { + + private static final ConfigurationService configService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final EventService eventService = EventServiceFactory.getInstance().getEventService(); private static String[] consumers; @Autowired @@ -52,13 +56,11 @@ public class UpdateItemReferenceIT extends AbstractControllerIntegrationTest { */ @BeforeClass public static void initCrisConsumer() { - ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); consumers = configService.getArrayProperty("event.dispatcher.default.consumers"); Set consumersSet = new HashSet(Arrays.asList(consumers)); consumersSet.remove("referenceresolver"); consumersSet.remove("crisconsumer"); configService.setProperty("event.dispatcher.default.consumers", consumersSet.toArray()); - EventService eventService = EventServiceFactory.getInstance().getEventService(); eventService.reloadConfiguration(); } @@ -67,9 +69,7 @@ public static void initCrisConsumer() { */ @AfterClass public static void resetDefaultConsumers() { - ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); configService.setProperty("event.dispatcher.default.consumers", consumers); - EventService eventService = EventServiceFactory.getInstance().getEventService(); eventService.reloadConfiguration(); } From 310210c83b7f0ec17695c76cb6903946236ff4ad Mon Sep 17 00:00:00 2001 From: Alexander K Date: Mon, 20 Nov 2023 15:44:11 +0100 Subject: [PATCH 586/686] [DSC-820] add bitstream.hide metadata --- dspace/config/submission-forms.xml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index 784b8ee301aa..c44ac175c1c7 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -37,6 +37,17 @@ + + + bitstream + hide + + dropdown + false + + + +
    From 30f4d1220b5299d83400942376dafbdd76c19f95 Mon Sep 17 00:00:00 2001 From: Luca Giamminonni Date: Tue, 21 Nov 2023 11:51:25 +0100 Subject: [PATCH 587/686] [DSC-1357] ORCID integration not working correctly with versions --- .../orcid/consumer/OrcidQueueConsumer.java | 51 +++++--- .../org/dspace/orcid/dao/OrcidQueueDAO.java | 10 ++ .../orcid/dao/impl/OrcidQueueDAOImpl.java | 7 + .../orcid/service/OrcidQueueService.java | 10 ++ .../service/impl/OrcidQueueServiceImpl.java | 5 + .../dspace/versioning/VersioningConsumer.java | 35 +++++ .../dspace/orcid/OrcidQueueConsumerIT.java | 121 ++++++++++++++++++ 7 files changed, 223 insertions(+), 16 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java b/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java index 97605429d9cd..869dc452c7d8 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java +++ b/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java @@ -14,9 +14,10 @@ import static org.apache.commons.collections.CollectionUtils.isNotEmpty; import java.sql.SQLException; -import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.UUID; import java.util.stream.Stream; @@ -30,6 +31,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.core.CrisConstants; +import org.dspace.core.exception.SQLRuntimeException; import org.dspace.event.Consumer; import org.dspace.event.Event; import org.dspace.orcid.OrcidHistory; @@ -73,7 +75,7 @@ public class OrcidQueueConsumer implements Consumer { private ConfigurationService configurationService; - private List alreadyConsumedItems = new ArrayList<>(); + private Set itemsToConsume = new HashSet<>(); @Override public void initialize() throws Exception { @@ -107,16 +109,26 @@ public void consume(Context context, Event event) throws Exception { return; } - if (alreadyConsumedItems.contains(item.getID())) { - return; - } + itemsToConsume.add(item.getID()); + } + + @Override + public void end(Context context) throws Exception { + + for (UUID itemId : itemsToConsume) { + + Item item = itemService.find(context, itemId); + + context.turnOffAuthorisationSystem(); + try { + consumeItem(context, item); + } finally { + context.restoreAuthSystemState(); + } - context.turnOffAuthorisationSystem(); - try { - consumeItem(context, item); - } finally { - context.restoreAuthSystemState(); } + + itemsToConsume.clear(); } private void consumeItem(Context context, Item item) throws SQLException { @@ -132,7 +144,7 @@ private void consumeItem(Context context, Item item) throws SQLException { consumeProfile(context, item); } - alreadyConsumedItems.add(item.getID()); + itemsToConsume.add(item.getID()); } @@ -162,6 +174,10 @@ private void consumeEntity(Context context, Item entity) throws SQLException { continue; } + if (isNotLatestVersion(context, entity)) { + continue; + } + orcidQueueService.create(context, relatedItem, entity); } @@ -297,6 +313,14 @@ private boolean isNotProfileItem(Item profileItemItem) { return !getProfileType().equals(itemService.getEntityTypeLabel(profileItemItem)); } + private boolean isNotLatestVersion(Context context, Item entity) { + try { + return !itemService.isLatestVersion(context, entity); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + private boolean isNestedMetadataPlaceholder(MetadataValue metadata) { return 
StringUtils.equals(metadata.getValue(), CrisConstants.PLACEHOLDER_PARENT_METADATA_VALUE); } @@ -322,11 +346,6 @@ private boolean isOrcidSynchronizationDisabled() { return !configurationService.getBooleanProperty("orcid.synchronization-enabled", true); } - @Override - public void end(Context context) throws Exception { - alreadyConsumedItems.clear(); - } - @Override public void finish(Context context) throws Exception { // nothing to do diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java index 235443b15033..b7e0b1ed2a85 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java @@ -74,6 +74,16 @@ public List findByProfileItemAndEntity(Context context, Item profile */ public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + /** + * Get the OrcidQueue records where the given item is the entity. + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidQueue entities + * @throws SQLException if database error + */ + public List findByEntity(Context context, Item item) throws SQLException; + /** * Find all the OrcidQueue records with the given entity and record type. * diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java index 2114b2535759..8e941b056535 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java @@ -63,6 +63,13 @@ public List findByProfileItemOrEntity(Context context, Item item) th return query.getResultList(); } + @Override + public List findByEntity(Context context, Item item) throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE entity.id = :itemId"); + query.setParameter("itemId", item.getID()); + return query.getResultList(); + } + @Override public List findByEntityAndRecordType(Context context, Item entity, String type) throws SQLException { Query query = createQuery(context, "FROM OrcidQueue WHERE entity = :entity AND recordType = :type"); diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java index 8de25e9caf1e..b667088eabb4 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java @@ -164,6 +164,16 @@ public List findByProfileItemAndEntity(Context context, Item profile */ public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + /** + * Get the OrcidQueue records where the given item is the entity. + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidQueue records + * @throws SQLException if database error + */ + public List findByEntity(Context context, Item item) throws SQLException; + /** * Get all the OrcidQueue records with attempts less than the given attempts. 
* diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java index 98ab0c713a24..c0f70911b562 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java @@ -81,6 +81,11 @@ public List findByProfileItemOrEntity(Context context, Item item) th return orcidQueueDAO.findByProfileItemOrEntity(context, item); } + @Override + public List findByEntity(Context context, Item item) throws SQLException { + return orcidQueueDAO.findByEntity(context, item); + } + @Override public long countByProfileItemId(Context context, UUID profileItemId) throws SQLException { return orcidQueueDAO.countByProfileItemId(context, profileItemId); diff --git a/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java b/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java index 51a49d14cc4a..3a5da0de1016 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java +++ b/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java @@ -31,9 +31,15 @@ import org.dspace.content.service.RelationshipTypeService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.core.exception.SQLRuntimeException; import org.dspace.discovery.IndexEventConsumer; import org.dspace.event.Consumer; import org.dspace.event.Event; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; import org.dspace.utils.DSpace; import org.dspace.versioning.factory.VersionServiceFactory; import org.dspace.versioning.service.VersionHistoryService; @@ -61,6 +67,8 @@ public class VersioningConsumer implements Consumer { private RelationshipService relationshipService; private RelationshipVersioningUtils relationshipVersioningUtils; private DedupService dedupService; + private OrcidQueueService orcidQueueService; + private OrcidHistoryService orcidHistoryService; @Override public void initialize() throws Exception { @@ -72,6 +80,8 @@ public void initialize() throws Exception { relationshipVersioningUtils = VersionServiceFactory.getInstance().getRelationshipVersioningUtils(); dedupService = new DSpace().getServiceManager().getServiceByName(DedupService.class.getName(), DedupService.class); + this.orcidQueueService = OrcidServiceFactory.getInstance().getOrcidQueueService(); + this.orcidHistoryService = OrcidServiceFactory.getInstance().getOrcidHistoryService(); } @Override @@ -138,6 +148,8 @@ public void consume(Context ctx, Event event) throws Exception { // unarchive previous item unarchiveItem(ctx, previousItem); + handleOrcidSynchronization(ctx, previousItem, latestItem); + updateDuplicateDetection(ctx, latestItem, previousItem); // update relationships @@ -155,6 +167,29 @@ protected void unarchiveItem(Context ctx, Item item) { )); } + private void handleOrcidSynchronization(Context ctx, Item previousItem, Item latestItem) { + try { + replaceOrcidHistoryEntities(ctx, previousItem, latestItem); + removeOrcidQueueEntries(ctx, previousItem); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + + private void removeOrcidQueueEntries(Context ctx, Item previousItem) throws SQLException { + List queueEntries = 
orcidQueueService.findByEntity(ctx, previousItem); + for (OrcidQueue queueEntry : queueEntries) { + orcidQueueService.delete(ctx, queueEntry); + } + } + + private void replaceOrcidHistoryEntities(Context ctx, Item previousItem, Item latestItem) throws SQLException { + List entries = orcidHistoryService.findByEntity(ctx, previousItem); + for (OrcidHistory entry : entries) { + entry.setEntity(latestItem); + } + } + private void updateDuplicateDetection(Context ctx, Item latestItem, Item previousItem) throws Exception { dedupService.inheritDecisions(ctx, previousItem, latestItem); dedupService.removeMatch(previousItem); diff --git a/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java b/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java index 1cec9473ba22..a1ebec2197e4 100644 --- a/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java @@ -26,6 +26,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; import java.sql.SQLException; import java.time.Instant; @@ -42,13 +43,19 @@ import org.dspace.content.Collection; import org.dspace.content.Item; import org.dspace.content.MetadataValue; +import org.dspace.content.WorkspaceItem; import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.InstallItemService; import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; import org.dspace.orcid.consumer.OrcidQueueConsumer; import org.dspace.orcid.factory.OrcidServiceFactory; import org.dspace.orcid.service.OrcidQueueService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; +import org.dspace.versioning.Version; +import org.dspace.versioning.service.VersioningService; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -65,8 +72,15 @@ public class OrcidQueueConsumerIT extends AbstractIntegrationTestWithDatabase { private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + + private InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private VersioningService versioningService = new DSpace().getServiceManager() + .getServicesByType(VersioningService.class).get(0); + private Collection profileCollection; @Before @@ -966,6 +980,113 @@ public void testOrcidQueueRecordCreationForPublicationWithNotFoundAuthority() th assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", INSERT)); } + @Test + public void testOrcidQueueWithItemVersioning() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationPublicationsPreference(ALL) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + 
.withTitle("Test publication") + .withAuthor("Test User", profile.getID().toString()) + .build(); + + context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", INSERT)); + + Version newVersion = versioningService.createNewVersion(context, publication); + Item newPublication = newVersion.getItem(); + assertThat(newPublication.isArchived(), is(false)); + + context.commit(); + + orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", INSERT)); + + WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, newVersion.getItem()); + installItemService.installItem(context, workspaceItem); + + context.commit(); + + context.restoreAuthSystemState(); + + orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, newPublication, "Publication", INSERT)); + + } + + @Test + public void testOrcidQueueUpdateWithItemVersioning() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationPublicationsPreference(ALL) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test publication") + .build(); + + OrcidHistory orcidHistory = OrcidHistoryBuilder.createOrcidHistory(context, profile, publication) + .withDescription("Test publication") + .withOperation(OrcidOperation.INSERT) + .withPutCode("12345") + .withStatus(201) + .build(); + + addMetadata(publication, "dc", "contributor", "author", "Test User", profile.getID().toString()); + + context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", "12345", UPDATE)); + + Version newVersion = versioningService.createNewVersion(context, publication); + Item newPublication = newVersion.getItem(); + assertThat(newPublication.isArchived(), is(false)); + + context.commit(); + + orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", "12345", UPDATE)); + + WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, newVersion.getItem()); + installItemService.installItem(context, workspaceItem); + + context.commit(); + + context.restoreAuthSystemState(); + + orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, newPublication, "Publication", "12345", UPDATE)); + + orcidHistory = context.reloadEntity(orcidHistory); + assertThat(orcidHistory.getEntity(), is(newPublication)); + + } + private void addMetadata(Item item, String schema, String element, String qualifier, String value, String authority) throws Exception { context.turnOffAuthorisationSystem(); From af7d0b3a8fbbb1fc237440b50f244e036fca7b79 Mon Sep 17 00:00:00 2001 
From: mohamed eskander Date: Tue, 21 Nov 2023 13:32:44 +0200 Subject: [PATCH 588/686] [DSC-1315] finalize layout on EPFL --- .../impl/CrisLayoutToolValidatorImpl.java | 22 ++++++++++++------- .../java/org/dspace/util/WorkbookUtils.java | 15 +++++++++++++ 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolValidatorImpl.java b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolValidatorImpl.java index a3e45858d789..d6d4e6be745a 100644 --- a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolValidatorImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolValidatorImpl.java @@ -11,6 +11,8 @@ import static org.dspace.util.WorkbookUtils.getCellIndexFromHeaderName; import static org.dspace.util.WorkbookUtils.getCellValue; import static org.dspace.util.WorkbookUtils.getColumnWithoutHeader; +import static org.dspace.util.WorkbookUtils.getEntityTypeCellValue; +import static org.dspace.util.WorkbookUtils.getEntityTypeValue; import static org.dspace.util.WorkbookUtils.getNotEmptyRowsSkippingHeader; import java.sql.SQLException; @@ -456,7 +458,7 @@ private void validatePresenceInBoxSheet(CrisLayoutToolValidationResult result, S int entityTypeColumn, int nameColumn) { for (Row row : getNotEmptyRowsSkippingHeader(sheet)) { - String entityType = getCellValue(row, entityTypeColumn); + String entityType = getEntityTypeCellValue(row, entityTypeColumn); String name = getCellValue(row, nameColumn); if (isNotPresentOnSheet(sheet.getWorkbook(), BOX_SHEET, entityType, name)) { result.addError("The box with name " + name + @@ -490,7 +492,7 @@ private void validatePresenceInTab2BoxSheet(CrisLayoutToolValidationResult resul } for (Row row : getNotEmptyRowsSkippingHeader(sheet)) { - String entityType = getCellValue(row, entityTypeColumn); + String entityType = getEntityTypeCellValue(row, entityTypeColumn); String shortname = getCellValue(row, shortnameColumn); if (isNotPresentOnTab2Box(tab2boxSheet, columnName, entityType, shortname)) { result.addWarning("The " + sheet.getSheetName() + " with name " + shortname + @@ -507,7 +509,7 @@ private void validateTab2BoxRowReferences(Row row, CrisLayoutToolValidationResul Sheet tab2boxSheet = row.getSheet(); - String entityType = getCellValue(row, entityTypeColumn); + String entityType = getEntityTypeCellValue(row, entityTypeColumn); String tab = getCellValue(row, tabColumn); String[] boxes = splitByCommaAndTrim(getCellValue(row, boxesColumn)); @@ -560,7 +562,7 @@ private void validateRowStyleColumn(Sheet sheet, String containerColumnName, continue; } - String entityType = getCellValue(row, entityTypeColumn); + String entityType = getEntityTypeCellValue(row, entityTypeColumn); String container = getCellValue(row, containerColumn); String rowCount = getCellValue(row, rowColumn); @@ -585,7 +587,7 @@ private List findSameRowsWithDifferentStyle(Sheet sheet, String entity, .filter(sheetRow -> excelRowNum != sheetRow.getRowNum()) .filter(sheetRow -> row.equals(getCellValue(sheetRow, rowColumn))) .filter(sheetRow -> container.equals(getCellValue(sheetRow, containerColumn))) - .filter(sheetRow -> entity.equals(getCellValue(sheetRow, entityTypeColumn))) + .filter(sheetRow -> entity.equals(getEntityTypeCellValue(sheetRow, entityTypeColumn))) .filter(sheetRow -> hasDifferentStyle(sheetRow, rowStyleColumn, style)) .map(Row::getRowNum) .collect(Collectors.toList()); @@ -633,7 +635,8 @@ private boolean 
sameEntityTypeAndName(Row row, int entityTypeColumn, String enti int nameColumn, String name) { String[] namesOnColumn = splitByCommaAndTrim(getCellValue(row, nameColumn)); - return entityType.equals(getCellValue(row, entityTypeColumn)) && ArrayUtils.contains(namesOnColumn, name); + return entityType.equals(getEntityTypeCellValue(row, entityTypeColumn)) + && ArrayUtils.contains(namesOnColumn, name); } @@ -641,8 +644,11 @@ private void validateEntityTypes(CrisLayoutToolValidationResult result, Sheet sh int entityColumn, List allEntityTypes) { for (Cell entityTypeCell : getColumnWithoutHeader(sheet, entityColumn)) { - String entityType = WorkbookUtils.getCellValue(entityTypeCell); - if (!allEntityTypes.contains(entityType)) { + String entityType = getCellValue(entityTypeCell); + if ( + !allEntityTypes.contains(entityType) && + !allEntityTypes.contains(getEntityTypeValue(entityTypeCell)) + ) { result.addError("The " + sheet.getSheetName() + " contains an unknown entity type '" + entityType + "' at row " + entityTypeCell.getRowIndex()); } diff --git a/dspace-api/src/main/java/org/dspace/util/WorkbookUtils.java b/dspace-api/src/main/java/org/dspace/util/WorkbookUtils.java index 64b6b95e8ec7..8ac9a4f7f13e 100644 --- a/dspace-api/src/main/java/org/dspace/util/WorkbookUtils.java +++ b/dspace-api/src/main/java/org/dspace/util/WorkbookUtils.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.Spliterators; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -87,6 +88,11 @@ public static List getRowValues(Row row, int size) { return values; } + public static String getEntityTypeCellValue(Row row, int index) { + Cell cell = row.getCell(index); + return getEntityTypeValue(cell); + } + public static String getCellValue(Row row, int index) { Cell cell = row.getCell(index); return getCellValue(cell); @@ -105,6 +111,15 @@ public static String getCellValue(Cell cell) { return formatter.formatCellValue(cell).trim(); } + public static String getEntityTypeValue(Cell cell) { + String cellValue = getCellValue(cell); + return Optional.ofNullable(cellValue) + .filter(value -> StringUtils.isNotBlank(value)) + .filter(value -> value.contains(".")) + .map(value -> value.split("\\.")[0]) + .orElse(cellValue); + } + public static Cell createCell(Row row, int column, String value) { Cell cell = row.createCell(column); cell.setCellValue(value); From b7b1d1b8425d536e10cd8cef6ff6507034c84e51 Mon Sep 17 00:00:00 2001 From: mohamed eskander Date: Tue, 21 Nov 2023 13:42:25 +0200 Subject: [PATCH 589/686] [DSC-1315] added missed sql files --- ...0.28__update_cris_layout_tab_constraint.sql | 18 ++++++++++++++++++ ...0.28__update_cris_layout_tab_constraint.sql | 18 ++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql new file mode 100644 index 000000000000..6ea435bfeed2 --- /dev/null +++ 
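
The new WorkbookUtils#getEntityTypeValue helper is what lets the layout validator accept entity-type cells that carry a custom-filter suffix. Its effect is easiest to see on sample values; the sketch and the example cell contents below are illustrative only:

    // Mirrors getEntityTypeValue: an "<entity type>.<custom filter>" cell is reduced
    // to the plain entity type, while blank cells and cells without a dot pass through.
    static String stripCustomFilter(String cellValue) {
        if (cellValue == null || cellValue.isBlank() || !cellValue.contains(".")) {
            return cellValue;
        }
        return cellValue.split("\\.")[0];
    }

    // stripCustomFilter("Person")             -> "Person"
    // stripCustomFilter("Person.staff")       -> "Person"      (hypothetical custom filter)
    // stripCustomFilter("Publication.funded") -> "Publication" (hypothetical custom filter)

The companion SQL migrations that follow relax the cris_layout_tab unique constraint to include custom_filter for the same reason: the same shortname can now recur for an entity type once the custom filter is part of the key.
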
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- -- +-- Remove unique constraint on entity_id and shortname of table cris_layout_tab. +-- Now the entity_id and shortname aren't unique because entity_type can have custom_filter in it +-- -- +ALTER TABLE cris_layout_tab DROP CONSTRAINT cris_layout_tab_entity_shortname_unique; + +-- -- +-- +-- -- +ALTER TABLE cris_layout_tab ADD CONSTRAINT cris_layout_tab_entity_shortname_custom_filter_unique UNIQUE(entity_id, shortname, custom_filter); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql new file mode 100644 index 000000000000..6ea435bfeed2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.10.28__update_cris_layout_tab_constraint.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- -- +-- Remove unique constraint on entity_id and shortname of table cris_layout_tab. +-- Now the entity_id and shortname aren't unique because entity_type can have custom_filter in it +-- -- +ALTER TABLE cris_layout_tab DROP CONSTRAINT cris_layout_tab_entity_shortname_unique; + +-- -- +-- +-- -- +ALTER TABLE cris_layout_tab ADD CONSTRAINT cris_layout_tab_entity_shortname_custom_filter_unique UNIQUE(entity_id, shortname, custom_filter); \ No newline at end of file From 7abc375853748f8ee84c70fcf625dedff4f1be31 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 7 Nov 2023 11:03:39 +0100 Subject: [PATCH 590/686] [DSC-1361] Fixes SitemapRestControllerIT --- .../dspace/app/sitemap/GenerateSitemaps.java | 238 +++++------------- .../app/rest/SitemapRestControllerIT.java | 12 +- 2 files changed, 69 insertions(+), 181 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java index 2b1c44988b88..90962d12aa75 100644 --- a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java +++ b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java @@ -7,17 +7,11 @@ */ package org.dspace.app.sitemap; -import java.io.BufferedReader; import java.io.File; import java.io.IOException; -import java.io.InputStreamReader; -import java.io.UnsupportedEncodingException; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLEncoder; import java.sql.SQLException; -import java.util.Iterator; +import java.util.Date; +import java.util.List; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; @@ -25,24 +19,22 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FileUtils; -import 
org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.common.SolrDocument; -import org.apache.solr.common.SolrDocumentList; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.core.LogHelper; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SearchUtils; -import org.dspace.discovery.SolrSearchCore; -import org.dspace.eperson.Group; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.eperson.service.GroupService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -58,11 +50,14 @@ public class GenerateSitemaps { */ private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(GenerateSitemaps.class); + private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + private static final CollectionService collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService(); private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); private static final SearchService searchService = SearchUtils.getSearchService(); - private static final GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); - private static final int PAGE_SIZE = 1000; + private static final int PAGE_SIZE = 100; /** * Default constructor @@ -82,11 +77,6 @@ public static void main(String[] args) throws Exception { "do not generate sitemaps.org protocol sitemap"); options.addOption("b", "no_htmlmap", false, "do not generate a basic HTML sitemap"); - options.addOption("a", "ping_all", false, - "ping configured search engines"); - options - .addOption("p", "ping", true, - "ping specified search engine URL"); options .addOption("d", "delete", false, "delete sitemaps dir and its contents"); @@ -131,20 +121,6 @@ public static void main(String[] args) throws Exception { deleteSitemaps(); } - if (line.hasOption('a')) { - pingConfiguredSearchEngines(); - } - - if (line.hasOption('p')) { - try { - pingSearchEngine(line.getOptionValue('p')); - } catch (MalformedURLException me) { - System.err - .println("Bad search engine URL (include all except sitemap URL)"); - System.exit(1); - } - } - System.exit(0); } @@ -205,32 +181,24 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) } Context c = new Context(Context.Mode.READ_ONLY); - SolrSearchCore solrSearchCore = searchService.getSolrSearchCore(); - SolrClient solr = solrSearchCore.getSolr(); - Group anonymousGroup = groupService.findByName(c, Group.ANONYMOUS); - String anonGroupId = ""; - if (anonymousGroup != null) { - anonGroupId = anonymousGroup.getID().toString(); - } + int offset = 
0; + long commsCount = 0; + long collsCount = 0; + long itemsCount = 0; try { - SolrQuery solrQuery = new SolrQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":Community"); - solrQuery.addFilterQuery("read:g" + anonGroupId); - solrQuery.setFields(SearchUtils.RESOURCE_ID_FIELD); - solrQuery.setRows(PAGE_SIZE); - int offset = 0; - long commsCount = 0; - QueryResponse rsp; + DiscoverQuery discoveryQuery = new DiscoverQuery(); + discoveryQuery.setMaxResults(PAGE_SIZE); + discoveryQuery.setQuery("search.resourcetype:Community"); do { - solrQuery.setStart(offset); - rsp = solr.query(solrQuery, solrSearchCore.REQUEST_METHOD); - SolrDocumentList docs = rsp.getResults(); - commsCount = docs.getNumFound(); - Iterator iter = docs.iterator(); + discoveryQuery.setStart(offset); + DiscoverResult discoverResult = searchService.search(c, discoveryQuery); + List docs = discoverResult.getIndexableObjects(); + commsCount = discoverResult.getTotalSearchResults(); - while (iter.hasNext()) { - SolrDocument doc = (SolrDocument) iter.next(); - String url = uiURLStem + "/communities/" + doc.getFieldValue(SearchUtils.RESOURCE_ID_FIELD); + for (IndexableObject doc : docs) { + String url = uiURLStem + "communities/" + doc.getID(); + c.uncacheEntity(doc.getIndexedObject()); if (makeHTMLMap) { html.addURL(url, null); @@ -242,22 +210,19 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) offset += PAGE_SIZE; } while (offset < commsCount); - solrQuery = new SolrQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":Collection"); - solrQuery.addFilterQuery("read:g" + anonGroupId); - solrQuery.setFields(SearchUtils.RESOURCE_ID_FIELD); - solrQuery.setRows(PAGE_SIZE); offset = 0; - long collsCount = 0; + discoveryQuery = new DiscoverQuery(); + discoveryQuery.setMaxResults(PAGE_SIZE); + discoveryQuery.setQuery("search.resourcetype:Collection"); do { - solrQuery.setStart(offset); - rsp = solr.query(solrQuery, solrSearchCore.REQUEST_METHOD); - SolrDocumentList docs = rsp.getResults(); - collsCount = docs.getNumFound(); - Iterator iter = docs.iterator(); + discoveryQuery.setStart(offset); + DiscoverResult discoverResult = searchService.search(c, discoveryQuery); + List docs = discoverResult.getIndexableObjects(); + collsCount = discoverResult.getTotalSearchResults(); - while (iter.hasNext()) { - SolrDocument doc = (SolrDocument) iter.next(); - String url = uiURLStem + "/collections/" + doc.getFieldValue(SearchUtils.RESOURCE_ID_FIELD); + for (IndexableObject doc : docs) { + String url = uiURLStem + "collections/" + doc.getID(); + c.uncacheEntity(doc.getIndexedObject()); if (makeHTMLMap) { html.addURL(url, null); @@ -269,39 +234,37 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) offset += PAGE_SIZE; } while (offset < collsCount); - solrQuery = new SolrQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":Item"); - solrQuery.setFields(SearchUtils.RESOURCE_ID_FIELD, "customurl", "search.entitytype"); - solrQuery.addFilterQuery("read:g" + anonGroupId); - solrQuery.addFilterQuery("-discoverable:false"); - solrQuery.setRows(PAGE_SIZE); offset = 0; - long itemsCount = 0; + discoveryQuery = new DiscoverQuery(); + discoveryQuery.setMaxResults(PAGE_SIZE); + discoveryQuery.setQuery("search.resourcetype:Item"); + discoveryQuery.addSearchField("search.entitytype"); do { - solrQuery.setStart(offset); - rsp = solr.query(solrQuery, solrSearchCore.REQUEST_METHOD); - SolrDocumentList docs = rsp.getResults(); - itemsCount = docs.getNumFound(); - Iterator iter = docs.iterator(); - - while (iter.hasNext()) { - 
SolrDocument doc = (SolrDocument) iter.next(); - String uuid = (String) doc.getFirstValue(SearchUtils.RESOURCE_ID_FIELD); - String entityType = (String) doc.getFirstValue("search.entitytype"); - String customUrl = (String) doc.getFirstValue("customUrl"); - String url = uiURLStem + "/items/" + uuid; - - if (StringUtils.isNotBlank(customUrl)) { - url = uiURLStem + "/entities/" + StringUtils.lowerCase(entityType) + "/" + customUrl; - } else if (StringUtils.isNoneBlank(entityType)) { - url = uiURLStem + "/entities/" + StringUtils.lowerCase(entityType) + "/" + uuid; + + discoveryQuery.setStart(offset); + DiscoverResult discoverResult = searchService.search(c, discoveryQuery); + List docs = discoverResult.getIndexableObjects(); + itemsCount = discoverResult.getTotalSearchResults(); + + for (IndexableObject doc : docs) { + String url; + List entityTypeFieldValues = discoverResult.getSearchDocument(doc).get(0) + .getSearchFieldValues("search.entitytype"); + if (CollectionUtils.isNotEmpty(entityTypeFieldValues)) { + url = uiURLStem + "entities/" + StringUtils.lowerCase(entityTypeFieldValues.get(0)) + "/" + + doc.getID(); + } else { + url = uiURLStem + "items/" + doc.getID(); } + Date lastMod = doc.getLastModified(); + c.uncacheEntity(doc.getIndexedObject()); + if (makeHTMLMap) { html.addURL(url, null); } if (makeSitemapOrg) { sitemapsOrg.addURL(url, null); } - } offset += PAGE_SIZE; } while (offset < itemsCount); @@ -321,85 +284,10 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) + commsCount + ",collections=" + collsCount + ",items=" + itemsCount)); } - } catch (SolrServerException e) { + } catch (SearchServiceException e) { throw new RuntimeException(e); } finally { c.abort(); } } - - /** - * Ping all search engines configured in {@code dspace.cfg}. - * - * @throws UnsupportedEncodingException theoretically should never happen - */ - public static void pingConfiguredSearchEngines() - throws UnsupportedEncodingException { - String[] engineURLs = configurationService - .getArrayProperty("sitemap.engineurls"); - - if (ArrayUtils.isEmpty(engineURLs)) { - log.warn("No search engine URLs configured to ping"); - return; - } - - for (int i = 0; i < engineURLs.length; i++) { - try { - pingSearchEngine(engineURLs[i]); - } catch (MalformedURLException me) { - log.warn("Bad search engine URL in configuration: " - + engineURLs[i]); - } - } - } - - /** - * Ping the given search engine. - * - * @param engineURL Search engine URL minus protocol etc, e.g. 
- * {@code www.google.com} - * @throws MalformedURLException if the passed in URL is malformed - * @throws UnsupportedEncodingException theoretically should never happen - */ - public static void pingSearchEngine(String engineURL) - throws MalformedURLException, UnsupportedEncodingException { - // Set up HTTP proxy - if ((StringUtils.isNotBlank(configurationService.getProperty("http.proxy.host"))) - && (StringUtils.isNotBlank(configurationService.getProperty("http.proxy.port")))) { - System.setProperty("proxySet", "true"); - System.setProperty("proxyHost", configurationService - .getProperty("http.proxy.host")); - System.getProperty("proxyPort", configurationService - .getProperty("http.proxy.port")); - } - - String sitemapURL = configurationService.getProperty("dspace.ui.url") - + "/sitemap"; - - URL url = new URL(engineURL + URLEncoder.encode(sitemapURL, "UTF-8")); - - try { - HttpURLConnection connection = (HttpURLConnection) url - .openConnection(); - - BufferedReader in = new BufferedReader(new InputStreamReader( - connection.getInputStream())); - - String inputLine; - StringBuffer resp = new StringBuffer(); - while ((inputLine = in.readLine()) != null) { - resp.append(inputLine).append("\n"); - } - in.close(); - - if (connection.getResponseCode() == 200) { - log.info("Pinged " + url.toString() + " successfully"); - } else { - log.warn("Error response pinging " + url.toString() + ":\n" - + resp); - } - } catch (IOException e) { - log.warn("Error pinging " + url.toString(), e); - } - } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java index 03328966f6bd..175fb34e6cac 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SitemapRestControllerIT.java @@ -71,6 +71,9 @@ public void setUp() throws Exception { policyService.removeAllPolicies(context, communityRestricted); collection = CollectionBuilder.createCollection(context, community).build(); collectionRestricted = CollectionBuilder.createCollection(context, community).build(); + Collection publicationCollection = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .withName("Publication Collection").build(); policyService.removeAllPolicies(context, collectionRestricted); this.item1 = createItem(context, collection) @@ -91,20 +94,17 @@ public void setUp() throws Exception { .withIssueDate("2015-8-3") .makeUnDiscoverable() .build(); - this.entityPublication = createItem(context, collection) + this.entityPublication = createItem(context, publicationCollection) .withTitle("Item Publication") - .withEntityType("Publication") .withIssueDate("2015-8-3") .build(); - this.entityPublicationRestricted = createItem(context, collection) + this.entityPublicationRestricted = createItem(context, publicationCollection) .withTitle("Item Publication Restricted") - .withEntityType("Publication") .withIssueDate("2015-8-3") .build(); policyService.removeAllPolicies(context, entityPublicationRestricted); - this.entityPublicationUndiscoverable = createItem(context, collection) + this.entityPublicationUndiscoverable = createItem(context, publicationCollection) .withTitle("Item Publication") - .withEntityType("Publication") .withIssueDate("2015-8-3") .makeUnDiscoverable() .build(); From 666460ec0facb7dfbb1844be2a35d768cb9ed6d9 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 21 Nov 
2023 13:20:31 +0100 Subject: [PATCH 591/686] [DSC-1361] Fixes Integration Tests --- .../authenticate/PasswordAuthentication.java | 16 ++-- .../security/MetadataSecurityServiceImpl.java | 20 ++++- .../test/data/dspaceFolder/config/local.cfg | 2 +- .../converter/AInprogressItemConverter.java | 3 +- .../rest/AuthenticationRestControllerIT.java | 54 ------------- .../rest/AuthorizationRestRepositoryIT.java | 1 - .../app/rest/BitstreamRestControllerIT.java | 81 +++++++++++-------- .../dspace/app/rest/ItemRestRepositoryIT.java | 3 +- .../registries/relationship-formats.xml | 20 +++++ 9 files changed, 100 insertions(+), 100 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java index 6d1ca862d307..3709eb2b6be6 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java @@ -23,6 +23,7 @@ import org.dspace.eperson.Group; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; +import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; /** @@ -52,12 +53,14 @@ public class PasswordAuthentication */ private static final Logger log = LogManager.getLogger(); + private static final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final String PASSWORD_AUTHENTICATED = "password.authenticated"; private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); - /** * Look to see if this email address is allowed to register. *

    @@ -76,8 +79,7 @@ public boolean canSelfRegister(Context context, String email) throws SQLException { // Is there anything set in domain.valid? - String[] domains = DSpaceServicesFactory.getInstance().getConfigurationService() - .getArrayProperty("authentication-password.domain.valid"); + String[] domains = configurationService.getArrayProperty("authentication-password.domain.valid"); if ((domains == null) || (domains.length == 0)) { // No conditions set, so must be able to self register return true; @@ -146,8 +148,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) && StringUtils.isNotBlank( EPersonServiceFactory.getInstance().getEPersonService().getPasswordHash(context.getCurrentUser()) .toString())) { - String groupName = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("authentication-password.login.specialgroup"); + String groupName = configurationService.getProperty("authentication-password.login.specialgroup"); if ((groupName != null) && !groupName.trim().isEmpty()) { Group specialGroup = EPersonServiceFactory.getInstance().getGroupService() .findByName(context, groupName); @@ -169,6 +170,11 @@ public List getSpecialGroups(Context context, HttpServletRequest request) return Collections.EMPTY_LIST; } + @Override + public boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) { + return true; + } + /** * Check credentials: username must match the email address of an * EPerson record, and that EPerson must be allowed to login. diff --git a/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java index 7ea41807c11f..72cb8ba80cc3 100644 --- a/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java @@ -145,6 +145,14 @@ private List getPermissionFilteredMetadata(Context context, Item } + private boolean canEditItem(Context context, Item item) { + try { + return this.itemService.canEdit(context, item); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + private List findBoxes(Context context, Item item, boolean preventBoxSecurityCheck) { if (context == null || preventBoxSecurityCheck) { // the context could be null if the converter is used to prepare test data or in a batch script @@ -169,7 +177,11 @@ private boolean isMetadataFieldVisible(Context context, List boxe if (CollectionUtils.isNotEmpty(boxes)) { return isMetadataFieldVisibleByBoxes(context, boxes, item, metadataField, preventBoxSecurityCheck); } - return isNotAdmin(context) ? isNotHidden(context, metadataField) : true; + return isNotAdmin(context) ? 
isMetadataFieldVisibleFor(context, item, metadataField) : true; + } + + private boolean isMetadataFieldVisibleFor(Context context, Item item, MetadataField metadataField) { + return canEditItem(context, item) || isNotHidden(context, metadataField); } private boolean isMetadataValueReturnAllowed(Context context, Item item, MetadataValue metadataValue) { @@ -210,7 +222,7 @@ private boolean isMetadataFieldVisibleByBoxes(Context context, List dcInputsSet(final String sd) { } } + private boolean isAdmin(Context context) { + return !isNotAdmin(context); + } + private boolean isNotAdmin(Context context) { try { return context == null || !authorizeService.isAdmin(context); diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 8ff8df9944c4..3618d405f7f2 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -43,7 +43,7 @@ dspace.server.url = http://localhost db.driver = org.h2.Driver db.dialect=org.hibernate.dialect.H2Dialect # Use a 10 second database lock timeout to avoid occasional JDBC lock timeout errors -db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000;NON_KEYWORDS=ROW\,VALUE;TIME ZONE=UTC +db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000;NON_KEYWORDS=ROW\,VALUE;TIME ZONE=UTC;DB_CLOSE_ON_EXIT=FALSE db.username = sa db.password = # H2's default schema is PUBLIC diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java index 9ae71a04bebf..c936f9b622f9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java @@ -100,8 +100,7 @@ protected void fillFromModel(T obj, R witem, Projection projection) { addValidationErrorsToItem(obj, witem); - SubmissionDefinitionRest def = converter.toRest( - submissionConfigService.getSubmissionConfigByCollection(collection.getHandle()), projection); + SubmissionDefinitionRest def = converter.toRest(getSubmissionConfig(item, collection), projection); witem.setSubmissionDefinition(def); storeSubmissionName(def.getName()); for (SubmissionSectionRest sections : def.getPanels()) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java index 52df181c8d64..05dab9905681 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java @@ -1594,60 +1594,6 @@ public void testGenerateMachineTokenToDownloadBitstream() throws Exception { } - @Test - public void testGenerateMachineTokenWithSpecialGroups() throws Exception { - context.turnOffAuthorisationSystem(); - - EPerson user = EPersonBuilder.createEPerson(context) - .withCanLogin(true) - .withPassword(password) - .withEmail("myuser@test.com") - .build(); - - Group specialGroup = GroupBuilder.createGroup(context) - .withName("Special group") - .build(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent community") - .build(); - - Collection collection = CollectionBuilder.createCollection(context, parentCommunity) - .withName("Collection") - .build(); - - Item item = 
ItemBuilder.createItem(context, collection) - .withReaderGroup(specialGroup) - .build(); - - context.restoreAuthSystemState(); - - String token = getAuthToken(user.getEmail(), password); - - getClient(token).perform(get("/api/core/items/" + item.getID())) - .andExpect(status().isForbidden()); - - configurationService.setProperty("authentication-password.login.specialgroup", "Special group"); - - token = getAuthToken(user.getEmail(), password); - - configurationService.setProperty("authentication-password.login.specialgroup", null); - - getClient(token).perform(get("/api/core/items/" + item.getID())) - .andExpect(status().isOk()); - - AtomicReference machineToken = new AtomicReference<>(); - - getClient(token).perform(post("/api/authn/machinetokens")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.token", notNullValue())) - .andExpect(jsonPath("$.type", is("machinetoken"))) - .andDo(result -> machineToken.set(read(result.getResponse().getContentAsString(), "$.token"))); - - getClient(machineToken.get()).perform(get("/api/core/items/" + item.getID())) - .andExpect(status().isOk()); - } - @Test public void testGenerateMachineTokenWithAnonymousUser() throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthorizationRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthorizationRestRepositoryIT.java index 67185f2cdab2..6a51649b5bce 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthorizationRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthorizationRestRepositoryIT.java @@ -2796,7 +2796,6 @@ public void verifySpecialGroupForNonAdministrativeUsersTest() throws Exception { simpleArticle.getInputStream()) .withIssueDate("2022-07-15") .withSubject("Entry") - .withEntityType("Publication") .grantLicense() .build(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index 72d783ef2299..f3e8d17f1bfc 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -99,6 +99,8 @@ */ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest { + public static final String[] PASS_ONLY = {"org.dspace.authenticate.PasswordAuthentication"}; + protected SolrLoggerService solrLoggerService = StatisticsServiceFactory.getInstance().getSolrLoggerService(); @Autowired @@ -744,52 +746,63 @@ public void restrictedGroupBitstreamForbiddenTest() throws Exception { @Test public void restrictedSpecialGroupBitstreamTest() throws Exception { - context.turnOffAuthorisationSystem(); - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); + String authenticationMethod = + configurationService.getProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod"); - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) - .withName("Collection 1") - .build(); + try { - Group restrictedGroup = GroupBuilder.createGroup(context) - .withName("Restricted Group") - .build(); + configurationService.setProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod", PASS_ONLY); - String bitstreamContent = "Private!"; - try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + context.turnOffAuthorisationSystem(); 
- Item item = ItemBuilder.createItem(context, col1) - .withTitle("item 1") - .withIssueDate("2013-01-17") - .withAuthor("Doe, John") - .build(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); - bitstream = BitstreamBuilder - .createBitstream(context, item, is) - .withName("Test Embargoed Bitstream") - .withDescription("This bitstream is embargoed") - .withMimeType("text/plain") - .withReaderGroup(restrictedGroup) - .build(); - } + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); - context.restoreAuthSystemState(); + Group restrictedGroup = GroupBuilder.createGroup(context) + .withName("Restricted Group") + .build(); - String authToken = getAuthToken(eperson.getEmail(), password); - getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) - .andExpect(status().isForbidden()); + String bitstreamContent = "Private!"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - configurationService.setProperty("authentication-password.login.specialgroup", "Restricted Group"); + Item item = ItemBuilder.createItem(context, col1) + .withTitle("item 1") + .withIssueDate("2013-01-17") + .withAuthor("Doe, John") + .build(); - authToken = getAuthToken(eperson.getEmail(), password); - getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) - .andExpect(status().isOk()); + bitstream = BitstreamBuilder + .createBitstream(context, item, is) + .withName("Test Embargoed Bitstream") + .withDescription("This bitstream is embargoed") + .withMimeType("text/plain") + .withReaderGroup(restrictedGroup) + .build(); + } + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isForbidden()); - checkNumberOfStatsRecords(bitstream, 1); + configurationService.setProperty("authentication-password.login.specialgroup", "Restricted Group"); + authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); + + checkNumberOfStatsRecords(bitstream, 1); + + } finally { + configurationService.setProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod", authenticationMethod); + } } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index 8094cf539c35..29caf8def121 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -12,6 +12,7 @@ import static org.dspace.app.matcher.OrcidQueueMatcher.matches; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; +import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataNotEmpty; import static org.dspace.builder.OrcidHistoryBuilder.createOrcidHistory; import static org.dspace.builder.OrcidQueueBuilder.createOrcidQueue; import static org.dspace.core.Constants.READ; @@ -3145,7 +3146,7 @@ public void testHiddenMetadataForUserWithWriteRights() throws Exception { .andExpect(status().isOk()) 
.andExpect(jsonPath("$", ItemMatcher.matchItemProperties(item))) .andExpect(jsonPath("$.metadata", matchMetadata("dc.title", "Public item 1"))) - .andExpect(jsonPath("$.metadata", matchMetadata("dc.description.provenance", "Provenance data"))); + .andExpect(jsonPath("$.metadata", matchMetadataNotEmpty("dc.description.provenance"))); } diff --git a/dspace/config/registries/relationship-formats.xml b/dspace/config/registries/relationship-formats.xml index f2f50182facd..00e965055f07 100644 --- a/dspace/config/registries/relationship-formats.xml +++ b/dspace/config/registries/relationship-formats.xml @@ -287,4 +287,24 @@ Contains all uuids of PROJECTS which link to the current FUNDING AGENCY via a "latest" relationship. In other words, this stores all relationships pointing to the current FUNDING AGENCY from any PROJECT, implying that the FUNDING AGENCY is marked as "latest". Internally used by DSpace to support versioning. Do not manually add, remove or edit values. + + relation + isCorrectionOfItem + + + relation + isCorrectionOfItem + latestForDiscovery + + + + relation + isCorrectedByItem + + + relation + isCorrectedByItem + latestForDiscovery + + From cbe50d9b33ae0820ca1fca24b8ffe6b71074f66c Mon Sep 17 00:00:00 2001 From: Davide Negretti Date: Tue, 21 Nov 2023 13:22:44 +0100 Subject: [PATCH 592/686] [DSC-552] Fix metric popup positions --- dspace/config/modules/metrics.cfg | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace/config/modules/metrics.cfg b/dspace/config/modules/metrics.cfg index 0bdaaa25967a..f93df199aaa5 100644 --- a/dspace/config/modules/metrics.cfg +++ b/dspace/config/modules/metrics.cfg @@ -39,7 +39,7 @@ altmetric.badgeType = donut altmetric.list-badgeType = donut # Possible values: left right top bottom -altmetric.popover = left +altmetric.popover = bottom altmetric.list-popover = left # Possible values: right @@ -67,7 +67,7 @@ altmetric.list-linkTarget = _blank dimensions.data-badge-enabled = true dimensions.list-data-badge-enabled = true -dimensions.data-legend = hover-left +dimensions.data-legend = hover-bottom dimensions.list-data-legend = hover-left dimensions.data-style = small_circle @@ -123,7 +123,7 @@ plumx.list-data-no-thumbnail = true plumx.list-data-no-artifacts = true # properties for publication widget -plumx.list-data-popup = bottom +plumx.list-data-popup = left plumx.list-data-hide-when-empty = true plumx.list-data-hide-usage = false plumx.list-data-hide-captures = false From 216aacd92d9eafd19be30938a24bbebb26de913c Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 21 Nov 2023 14:41:29 +0100 Subject: [PATCH 593/686] [DSC-1361] Fixes checkstyle --- .../java/org/dspace/app/rest/BitstreamRestControllerIT.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index f3e8d17f1bfc..705aa162a2fd 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -801,7 +801,10 @@ public void restrictedSpecialGroupBitstreamTest() throws Exception { checkNumberOfStatsRecords(bitstream, 1); } finally { - configurationService.setProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod", authenticationMethod); + configurationService.setProperty( + 
"plugin.sequence.org.dspace.authenticate.AuthenticationMethod", + authenticationMethod + ); } } From a47f17acf77b319deffcf785e28dc7764629e6b7 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 21 Nov 2023 14:48:42 +0100 Subject: [PATCH 594/686] [DSC-1361] Fixes Context NPE --- .../dspace/content/security/MetadataSecurityServiceImpl.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java index 72cb8ba80cc3..603ddb228f25 100644 --- a/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/security/MetadataSecurityServiceImpl.java @@ -146,6 +146,9 @@ private List getPermissionFilteredMetadata(Context context, Item } private boolean canEditItem(Context context, Item item) { + if (context == null) { + return false; + } try { return this.itemService.canEdit(context, item); } catch (SQLException e) { From 3859adeb84b4a4feb703ab053aaa3a5883c2f8e5 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 22 Nov 2023 09:54:51 +0100 Subject: [PATCH 595/686] [DSC-1361] Fixes Context special groups cache issue --- .../java/org/dspace/authenticate/PasswordAuthentication.java | 4 ---- dspace-api/src/main/java/org/dspace/core/Context.java | 3 +++ .../src/main/java/org/dspace/core/ContextReadOnlyCache.java | 4 ++++ .../java/org/dspace/app/rest/BitstreamRestControllerIT.java | 3 +++ 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java index 3709eb2b6be6..328f4b4a8e38 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java @@ -170,10 +170,6 @@ public List getSpecialGroups(Context context, HttpServletRequest request) return Collections.EMPTY_LIST; } - @Override - public boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) { - return true; - } /** * Check credentials: username must match the email address of an diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index d5b16b4ee16b..142fb8acaa31 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -665,6 +665,9 @@ public boolean isReadOnly() { */ public void setSpecialGroup(UUID groupID) { specialGroups.add(groupID); + if (getCachedAllMemberGroupsSet(currentUser) != null) { + readOnlyCache.clearAllMembershipGroupCache(currentUser); + } } /** diff --git a/dspace-api/src/main/java/org/dspace/core/ContextReadOnlyCache.java b/dspace-api/src/main/java/org/dspace/core/ContextReadOnlyCache.java index 9a6e5bfc0706..e4fdb4c7c1d4 100644 --- a/dspace-api/src/main/java/org/dspace/core/ContextReadOnlyCache.java +++ b/dspace-api/src/main/java/org/dspace/core/ContextReadOnlyCache.java @@ -95,4 +95,8 @@ private Pair buildGroupMembershipKey(Group group, EPerson eperso eperson == null ? 
"" : eperson.getID().toString()); } + protected void clearAllMembershipGroupCache(EPerson eperson) { + allMemberGroupsCache.remove(buildAllMembersGroupKey(eperson)); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index 705aa162a2fd..92cff6db2192 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -36,6 +36,7 @@ import static org.springframework.http.MediaType.parseMediaType; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.head; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header; @@ -792,6 +793,8 @@ public void restrictedSpecialGroupBitstreamTest() throws Exception { getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) .andExpect(status().isForbidden()); + getClient(authToken).perform(post("/api/authn/logout")).andExpect(status().isNoContent()); + configurationService.setProperty("authentication-password.login.specialgroup", "Restricted Group"); authToken = getAuthToken(eperson.getEmail(), password); From 13966962e86f72cc1e590aa8f4414c40abdaa893 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Wed, 22 Nov 2023 09:45:51 +0100 Subject: [PATCH 596/686] [DSC-1225] skip empty metadata during RDF conversion --- .../org/dspace/rdf/conversion/MetadataConverterPlugin.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/rdf/conversion/MetadataConverterPlugin.java b/dspace-api/src/main/java/org/dspace/rdf/conversion/MetadataConverterPlugin.java index 72ba03d99d27..0367556b8eab 100644 --- a/dspace-api/src/main/java/org/dspace/rdf/conversion/MetadataConverterPlugin.java +++ b/dspace-api/src/main/java/org/dspace/rdf/conversion/MetadataConverterPlugin.java @@ -136,6 +136,10 @@ public Model convert(Context context, DSpaceObject dso) List metadata_values = dsoService .getMetadata(dso, MetadataSchemaEnum.DC.getName(), Item.ANY, Item.ANY, Item.ANY); for (MetadataValue value : metadata_values) { + // skip empty values + if (value == null || StringUtils.isBlank(value.getValue())) { + continue; + } MetadataField metadataField = value.getMetadataField(); MetadataSchema metadataSchema = metadataField.getMetadataSchema(); String fieldname = metadataSchema.getName() + "." 
+ metadataField.getElement(); From 4cc0753965e09fa3f2968bf6bc03a78a7d308bd2 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 22 Nov 2023 12:28:55 +0100 Subject: [PATCH 597/686] [DSC-1361] Fixes ITs failures --- .../main/java/org/dspace/core/Context.java | 10 +- .../dspaceFolder/config/submission-forms.xml | 79 +++------ .../app/rest/SubmissionFormsControllerIT.java | 53 +++--- .../app/rest/iiif/IIIFControllerIT.java | 166 ++++++++++-------- 4 files changed, 152 insertions(+), 156 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index 142fb8acaa31..2dad6733f7f9 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -665,9 +665,7 @@ public boolean isReadOnly() { */ public void setSpecialGroup(UUID groupID) { specialGroups.add(groupID); - if (getCachedAllMemberGroupsSet(currentUser) != null) { - readOnlyCache.clearAllMembershipGroupCache(currentUser); - } + clearCachedGroups(); } /** @@ -956,6 +954,12 @@ public void cacheAllMemberGroupsSet(EPerson ePerson, Set groups) { } } + protected void clearCachedGroups() { + if (isReadOnly()) { + readOnlyCache.clearAllMembershipGroupCache(currentUser); + } + } + public Set getCachedAllMemberGroupsSet(EPerson ePerson) { if (isReadOnly()) { return readOnlyCache.getCachedAllMemberGroupsSet(ePerson); diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 2aa9ac516720..fe47289e4315 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -48,7 +48,7 @@ -
    + @@ -240,7 +240,7 @@ it, please enter the types and the actual numbers or codes. -
    + dc @@ -380,7 +380,7 @@ it, please enter the types and the actual numbers or codes. -
    + dc @@ -432,7 +432,7 @@ it, please enter the types and the actual numbers or codes. -
    + dc @@ -484,7 +484,7 @@ it, please enter the types and the actual numbers or codes. -
    + dc @@ -536,7 +536,7 @@ it, please enter the types and the actual numbers or codes. -
    + isJournalOfVolume @@ -1038,8 +1038,8 @@ it, please enter the types and the actual numbers or codes. - -
    + + dc @@ -1060,7 +1060,7 @@ it, please enter the types and the actual numbers or codes. false The Approval date. -You can leave out the day and/or month if they aren't applicable. + You can leave out the day and/or month if they aren't applicable. @@ -1073,7 +1073,7 @@ You can leave out the day and/or month if they aren't applicable. false The registration date of the patent. -You can leave out the day and/or month if they aren't applicable. + You can leave out the day and/or month if they aren't applicable. @@ -1169,8 +1169,8 @@ You can leave out the day and/or month if they aren't applicable. The date Filled. - -
    + +
    dc @@ -1206,8 +1206,8 @@ You can leave out the day and/or month if they aren't applicable. Enter the description of the patent. - -
    + +
    dc @@ -1243,8 +1243,8 @@ You can leave out the day and/or month if they aren't applicable. Result outputs that are referenced by this patent - - + +
    @@ -1703,7 +1703,7 @@ You can leave out the day and/or month if they aren't applicable. -
    + dc @@ -1772,7 +1772,7 @@ You can leave out the day and/or month if they aren't applicable. -
    + dc @@ -1822,9 +1822,9 @@ You can leave out the day and/or month if they aren't applicable. publication-coar-types - + -
    + dc @@ -1860,9 +1860,9 @@ You can leave out the day and/or month if they aren't applicable. - + -
    + dc @@ -1910,8 +1910,8 @@ You can leave out the day and/or month if they aren't applicable. You must enter at least the year. - -
    + +
    dc @@ -1925,7 +1925,7 @@ You can leave out the day and/or month if they aren't applicable. - +
    @@ -2073,35 +2073,6 @@ You can leave out the day and/or month if they aren't applicable. - -
    - - - dc - title - - false - - onebox - Field required - - - - -
    - - - dc - type - - false - - onebox - Field required - - - - diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java index b9f4c3e29ea3..27a7ccb0985d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java @@ -73,13 +73,13 @@ public void findAll() throws Exception { .andExpect(content().contentType(contentType)) //The configuration file for the test env includes PAGE_TOTAL_ELEMENTS forms .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(10))) - .andExpect(jsonPath("$.page.totalPages", equalTo(1))) + .andExpect(jsonPath("$.page.totalElements", equalTo(34))) + .andExpect(jsonPath("$.page.totalPages", equalTo(2))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect( jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms"))) - //The array of submissionforms should have a size of 8 - .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(10)))) + //The array of submissionforms should have a size of 34 + .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(20)))) ; } @@ -90,12 +90,12 @@ public void findAllWithNewlyCreatedAccountTest() throws Exception { .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(10))) - .andExpect(jsonPath("$.page.totalPages", equalTo(1))) + .andExpect(jsonPath("$.page.totalElements", equalTo(34))) + .andExpect(jsonPath("$.page.totalPages", equalTo(2))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect(jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms"))) - .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(10)))); + .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(20)))); } @Test @@ -670,38 +670,38 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=1"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + Matchers.containsString("page=16"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(10))) - .andExpect(jsonPath("$.page.totalPages", equalTo(5))) + .andExpect(jsonPath("$.page.totalElements", equalTo(34))) + .andExpect(jsonPath("$.page.totalPages", equalTo(17))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") .param("size", "2") - .param("page", "1")) + .param("page", "15")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("test-outside-workflow-hidden"))) - .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("languagetest"))) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("publication_indexing"))) + .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("qualdroptest"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), 
Matchers.containsString("page=0"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=0"), Matchers.containsString("size=2")))) + Matchers.containsString("page=14"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=1"), Matchers.containsString("size=2")))) + Matchers.containsString("page=15"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.next.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=2"), Matchers.containsString("size=2")))) + Matchers.containsString("page=16"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + Matchers.containsString("page=16"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(10))) - .andExpect(jsonPath("$.page.totalPages", equalTo(5))) - .andExpect(jsonPath("$.page.number", is(1))); + .andExpect(jsonPath("$.page.totalElements", equalTo(34))) + .andExpect(jsonPath("$.page.totalPages", equalTo(17))) + .andExpect(jsonPath("$.page.number", is(15))); } @Test @@ -726,18 +726,13 @@ public void visibilityTest() throws Exception { matchFormWithVisibility("Subject(s)", Map.of("submission", "hidden", "workflow", "read-only", "edit", "read-only")), matchFormWithVisibility("Description", Map.of("submission", "hidden")) - ))) - .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(10))) - .andExpect(jsonPath("$.page.totalPages", equalTo(5))) - .andExpect(jsonPath("$.page.number", is(3))); + ))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") .param("size", "2") .param("page", "4")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpageone"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=0"), Matchers.containsString("size=2")))) @@ -749,10 +744,10 @@ public void visibilityTest() throws Exception { Matchers.containsString("page=4"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + Matchers.containsString("page=16"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(10))) - .andExpect(jsonPath("$.page.totalPages", equalTo(5))) + .andExpect(jsonPath("$.page.totalElements", equalTo(34))) + .andExpect(jsonPath("$.page.totalPages", equalTo(17))) .andExpect(jsonPath("$.page.number", is(4))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java index 6eac415712b3..accc99f44de3 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/iiif/IIIFControllerIT.java @@ -7,7 +7,9 @@ */ package org.dspace.app.rest.iiif; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; @@ -129,7 +131,7 @@ public void findOneIIIFSearchableItemWithDefaultDimensionsIT() throws Exception .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.service.profile", is("http://iiif.io/api/search/0/search"))) - .andExpect(jsonPath("$.thumbnail.@id", Matchers.containsString("/iiif/2/" + .andExpect(jsonPath("$.thumbnail.@id", containsString("/iiif/2/" + bitstream1.getID()))) .andExpect(jsonPath("$.metadata[0].label", is("Title"))) .andExpect(jsonPath("$.metadata[0].value", is("Public item 1"))) @@ -139,7 +141,7 @@ public void findOneIIIFSearchableItemWithDefaultDimensionsIT() throws Exception .andExpect(jsonPath("$.metadata[2].value[0]", is("Smith, Donald"))) .andExpect(jsonPath("$.metadata[2].value[1]", is("Doe, John"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas"))) + containsString("/iiif/" + publicItem1.getID() + "/canvas"))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("1"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(64))) .andExpect(jsonPath("$.sequences[0].canvases[0].height", is(64))) @@ -157,14 +159,14 @@ public void findOneIIIFSearchableItemWithDefaultDimensionsIT() throws Exception .andExpect(jsonPath("$.sequences[0].canvases[0].metadata[4].value", is("11e23c5702595ba512c1c2ee8e8d6153 (MD5)"))) .andExpect(jsonPath("$.sequences[0].canvases[1].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[1].label", is("2"))) .andExpect(jsonPath("$.sequences[0].canvases[1].images[0].resource.service.@id", Matchers.endsWith(bitstream2.getID().toString()))) .andExpect(jsonPath("$.structures").doesNotExist()) .andExpect(jsonPath("$.related.@id", - Matchers.containsString("/items/" + publicItem1.getID()))); + containsString("/items/" + publicItem1.getID()))); } @Test @@ -212,13 +214,13 @@ public void findOneIIIFSearchableWithMixedConfigIT() throws Exception { .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(3163))) .andExpect(jsonPath("$.sequences[0].canvases[0].height", is(4220))) .andExpect(jsonPath("$.sequences[0].canvases[1].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[1].label", is("Global 2"))) 
.andExpect(jsonPath("$.sequences[0].canvases[1].width", is(2000))) @@ -231,43 +233,55 @@ public void findOneIIIFSearchableWithMixedConfigIT() throws Exception { public void findOneWithExcludedBitstreamIT() throws Exception { context.turnOffAuthorisationSystem(); parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); + .withName("Parent Community") + .build(); Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1") - .build(); + .build(); Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Public item 1") - .withIssueDate("2017-10-17") - .withAuthor("Smith, Donald").withAuthor("Doe, John") - .enableIIIF() - .build(); + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .enableIIIF() + .build(); String bitstreamContent = "ThisIsSomeText"; + Bitstream bitstream1; try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - BitstreamBuilder - .createBitstream(context, publicItem1, is) - .withName("Bitstream1.jpg") - .withMimeType("image/jpeg") - .withIIIFLabel("Custom Label") - .build(); + bitstream1 = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Bitstream1.jpg") + .withMimeType("image/jpeg") + .withIIIFLabel("Custom Label") + .build(); } + Bitstream bitstream2; try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - BitstreamBuilder - .createBitstream(context, publicItem1, is) - .withName("Bitstream2.jpg") - .withMimeType("image/jpeg") - .withIIIFDisabled() - .build(); + bitstream2 = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFDisabled() + .build(); } context.restoreAuthSystemState(); // Expect canvas label, width and height to match bitstream description. 
getClient().perform(get("/iiif/" + publicItem1.getID() + "/manifest")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(1))) - .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) - .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/c0"))) - .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))); + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(1))) + .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) + .andExpect(jsonPath("$.sequences[0].canvases[0].@id", + containsString( + "/iiif/" + publicItem1.getID().toString() + "/canvas/" + bitstream1.getID().toString() + ) + )) + .andExpect(jsonPath("$.sequences[0].canvases[*].@id", + not( + containsString( + "/iiif/" + publicItem1.getID().toString() + "/canvas/" + bitstream2.getID().toString() + ) + ) + )) + .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))); } @Test @@ -286,8 +300,9 @@ public void findOneWithExcludedBitstreamBundleIT() throws Exception { .build(); String bitstreamContent = "ThisIsSomeText"; + Bitstream bitstream1 = null; try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - BitstreamBuilder + bitstream1 = BitstreamBuilder .createBitstream(context, publicItem1, is) .withName("Bitstream1.jpg") .withMimeType("image/jpeg") @@ -295,8 +310,9 @@ public void findOneWithExcludedBitstreamBundleIT() throws Exception { .build(); } // Add bitstream + Bitstream bitstream2 = null; try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - BitstreamBuilder + bitstream2 = BitstreamBuilder .createBitstream(context, publicItem1, is, "ExcludedBundle", false) .withName("Bitstream2.jpg") .withMimeType("image/jpeg") @@ -309,7 +325,17 @@ public void findOneWithExcludedBitstreamBundleIT() throws Exception { .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(1))) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/c0"))) + containsString( + "/iiif/" + publicItem1.getID().toString() + "/canvas/" + bitstream1.getID().toString() + ) + )) + .andExpect(jsonPath("$.sequences[0].canvases[*].@id", + not( + containsString( + "/iiif/" + publicItem1.getID().toString() + "/canvas/" + bitstream2.getID().toString() + ) + ) + )) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))); } @@ -349,7 +375,7 @@ public void findOneIIIFSearchableWithCustomBundleAndConfigIT() throws Exception .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(3163))) @@ -503,7 +529,7 @@ public void findOneWithStructures() throws Exception { .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + 
publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Global 1"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(2000))) @@ -522,16 +548,16 @@ public void findOneWithStructures() throws Exception { Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0-0"))) .andExpect(jsonPath("$.structures[1].label", is("Section 1"))) .andExpect(jsonPath("$.structures[1].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.structures[2].@id", Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0-1"))) .andExpect(jsonPath("$.structures[2].label", is("Section 2"))) .andExpect(jsonPath("$.structures[2].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.structures[2].canvases[1]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream3.getID().toString()))) .andExpect(jsonPath("$.service").exists()); } @@ -714,45 +740,45 @@ public void findOneWithHierarchicalStructures() throws Exception { .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(8))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].images[0].resource.@id", - Matchers.containsString(bitstream1.getID().toString()))) + containsString(bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[1].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[1].images[0].resource.@id", - Matchers.containsString(bitstream2.getID().toString()))) + containsString(bitstream2.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[2].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream3.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[2].images[0].resource.@id", - Matchers.containsString(bitstream3.getID().toString()))) + containsString(bitstream3.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[3].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream4.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[3].images[0].resource.@id", - Matchers.containsString(bitstream4.getID().toString()))) + containsString(bitstream4.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[4].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream5.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[4].images[0].resource.@id", - 
Matchers.containsString(bitstream5.getID().toString()))) + containsString(bitstream5.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[5].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream6.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[5].images[0].resource.@id", - Matchers.containsString(bitstream6.getID().toString()))) + containsString(bitstream6.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[6].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream7.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[6].images[0].resource.@id", - Matchers.containsString(bitstream7.getID().toString()))) + containsString(bitstream7.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[7].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream8.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[7].images[0].resource.@id", - Matchers.containsString(bitstream8.getID().toString()))) + containsString(bitstream8.getID().toString()))) .andExpect(jsonPath("$.structures[0].@id", Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0"))) // the toc contains two top sections 1 & 2 without direct children canvases @@ -775,20 +801,20 @@ public void findOneWithHierarchicalStructures() throws Exception { Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0-0-1"))) .andExpect(jsonPath("$.structures[1].canvases", Matchers.hasSize(2))) .andExpect(jsonPath("$.structures[1].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.structures[1].canvases[1]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream5.getID().toString()))) // section 1 > a contains bitstream 2 and 3, no sub sections .andExpect(jsonPath("$.structures[2].label", is("a"))) .andExpect(jsonPath("$.structures[2].ranges").doesNotExist()) .andExpect(jsonPath("$.structures[2].canvases", Matchers.hasSize(2))) .andExpect(jsonPath("$.structures[2].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.structures[2].canvases[1]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream3.getID().toString()))) // section 1 > b contains only the bitstream 4 and no sub sections .andExpect(jsonPath("$.structures[3].@id", @@ -797,7 +823,7 @@ public void findOneWithHierarchicalStructures() throws Exception { .andExpect(jsonPath("$.structures[3].ranges").doesNotExist()) .andExpect(jsonPath("$.structures[3].canvases", Matchers.hasSize(1))) .andExpect(jsonPath("$.structures[3].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream4.getID().toString()))) // section 2 contains bitstream 6 and 7, sub section "sub 2-1" .andExpect(jsonPath("$.structures[4].label", is("Section 2"))) @@ -806,10 +832,10 @@ 
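// Sketch, not part of this patch: the eight per-canvas expectations above could also be generated
// from the ordered bitstream list, which makes the intended canvas ordering explicit. ResultActions
// comes from org.springframework.test.web.servlet.ResultActions; the rest is already imported.
List<Bitstream> orderedBitstreams = List.of(bitstream1, bitstream2, bitstream3, bitstream4,
        bitstream5, bitstream6, bitstream7, bitstream8);
ResultActions manifest = getClient().perform(get("/iiif/" + publicItem1.getID() + "/manifest"))
        .andExpect(status().isOk());
for (int i = 0; i < orderedBitstreams.size(); i++) {
    manifest.andExpect(jsonPath("$.sequences[0].canvases[" + i + "].@id",
            containsString("/iiif/" + publicItem1.getID() + "/canvas/" + orderedBitstreams.get(i).getID())));
}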
public void findOneWithHierarchicalStructures() throws Exception { Matchers.endsWith("/iiif/" + publicItem1.getID() + "/manifest/range/r0-1-0"))) .andExpect(jsonPath("$.structures[4].canvases", Matchers.hasSize(2))) .andExpect(jsonPath("$.structures[4].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream6.getID().toString()))) .andExpect(jsonPath("$.structures[4].canvases[1]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream7.getID().toString()))) // section 2 > sub 2-1 contains only the bitstream 8 no sub sections .andExpect(jsonPath("$.structures[5].@id", @@ -818,7 +844,7 @@ public void findOneWithHierarchicalStructures() throws Exception { .andExpect(jsonPath("$.structures[5].ranges").doesNotExist()) .andExpect(jsonPath("$.structures[5].canvases", Matchers.hasSize(1))) .andExpect(jsonPath("$.structures[5].canvases[0]", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream8.getID().toString()))) .andExpect(jsonPath("$.service").exists()); } @@ -931,7 +957,7 @@ public void findOneIIIFNotSearcheableIT() throws Exception { .andExpect(jsonPath("$.license", is("https://license.org"))) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("1"))) .andExpect(jsonPath("$.service").doesNotExist()); @@ -981,7 +1007,7 @@ public void findOneIIIFWithOtherContentIT() throws Exception { .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.seeAlso.@type", is("sc:AnnotationList"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("1"))) .andExpect(jsonPath("$.service").doesNotExist()); @@ -1036,7 +1062,7 @@ public void findOneUsingOriginalBundleIgnoreFileIT() throws Exception { .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases", Matchers.hasSize(1))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + publicItem1.getID() + "/canvas/" + containsString("/iiif/" + publicItem1.getID() + "/canvas/" + bitstream.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("1"))) .andExpect(jsonPath("$.rendering.@id", @@ -1113,13 +1139,13 @@ public void findOneIIIFRestrictedItem() throws Exception { .andExpect(status().isOk()) .andExpect(jsonPath("$.@context", is("http://iiif.io/api/presentation/2/context.json"))) .andExpect(jsonPath("$.sequences[0].canvases[0].@id", - Matchers.containsString("/iiif/" + restrictedItem1.getID() + "/canvas/" + containsString("/iiif/" + restrictedItem1.getID() + "/canvas/" + bitstream1.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[0].label", is("Custom Label"))) .andExpect(jsonPath("$.sequences[0].canvases[0].width", is(3163))) 
.andExpect(jsonPath("$.sequences[0].canvases[0].height", is(4220))) .andExpect(jsonPath("$.sequences[0].canvases[1].@id", - Matchers.containsString("/iiif/" + restrictedItem1.getID() + "/canvas/" + containsString("/iiif/" + restrictedItem1.getID() + "/canvas/" + bitstream2.getID().toString()))) .andExpect(jsonPath("$.sequences[0].canvases[1].label", is("Global 2"))) .andExpect(jsonPath("$.sequences[0].canvases[1].width", is(2000))) @@ -1255,7 +1281,7 @@ public void getAnnotationListSeeAlso() throws Exception { .andExpect(jsonPath("$.resources[0].@type", is("oa:Annotation"))) .andExpect(jsonPath("$.resources[0].motivation", is ("oa:linking"))) .andExpect(jsonPath("$.resources[0].resource.@id", - Matchers.containsString(bitstream2.getID() + "/content"))); + containsString(bitstream2.getID() + "/content"))); } From 3c2f826ce7a1ffeeb4fab5d94d2d15682ea9a921 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 22 Nov 2023 17:02:52 +0100 Subject: [PATCH 598/686] [DSC-1361] Fixes specialgroup evaluation for PasswordAuthentication --- .../authenticate/PasswordAuthentication.java | 15 +++++++++++++++ .../src/main/java/org/dspace/core/Context.java | 6 ------ 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java index 328f4b4a8e38..8eb29debf7cb 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java @@ -277,4 +277,19 @@ public boolean canChangePassword(Context context, EPerson ePerson, String curren } return ePersonService.checkPassword(context, ePerson, currentPassword); } + + @Override + public boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) { + return isPasswordAuthenticationMethodInContext(context, request) || + isPasswordAuthenticatedInRequest(context, request); + } + + private boolean isPasswordAuthenticatedInRequest(Context context, HttpServletRequest request) { + return StringUtils.isBlank(context.getAuthenticationMethod()) && + (Boolean) request.getAttribute(PASSWORD_AUTHENTICATED); + } + + private boolean isPasswordAuthenticationMethodInContext(Context context, HttpServletRequest request) { + return AuthenticationMethod.super.areSpecialGroupsApplicable(context, request); + } } diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index 2dad6733f7f9..4d3079240c3d 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -665,7 +665,6 @@ public boolean isReadOnly() { */ public void setSpecialGroup(UUID groupID) { specialGroups.add(groupID); - clearCachedGroups(); } /** @@ -954,11 +953,6 @@ public void cacheAllMemberGroupsSet(EPerson ePerson, Set groups) { } } - protected void clearCachedGroups() { - if (isReadOnly()) { - readOnlyCache.clearAllMembershipGroupCache(currentUser); - } - } public Set getCachedAllMemberGroupsSet(EPerson ePerson) { if (isReadOnly()) { From bd6845c47c45214119f6a49019c446418001e9aa Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 22 Nov 2023 17:52:28 +0100 Subject: [PATCH 599/686] [DSC-1361] Fixes NPE in ITs --- .../org/dspace/authenticate/PasswordAuthentication.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git 
a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java index 8eb29debf7cb..0bf0f9bcbc95 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java @@ -11,6 +11,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Optional; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -285,8 +286,10 @@ public boolean areSpecialGroupsApplicable(Context context, HttpServletRequest re } private boolean isPasswordAuthenticatedInRequest(Context context, HttpServletRequest request) { - return StringUtils.isBlank(context.getAuthenticationMethod()) && - (Boolean) request.getAttribute(PASSWORD_AUTHENTICATED); + return (context == null || StringUtils.isBlank(context.getAuthenticationMethod())) && + request != null && Optional.ofNullable(request.getAttribute(PASSWORD_AUTHENTICATED)) + .map(Boolean.class::cast) + .orElse(false); } private boolean isPasswordAuthenticationMethodInContext(Context context, HttpServletRequest request) { From 6abd4c40e623f0641962d7dde2b488a1837fd69b Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 23 Nov 2023 10:05:38 +0100 Subject: [PATCH 600/686] [DSC-1361] Fixes VersioningWithRelationshipsIT errors --- ...HandleIdentifierProviderWithCanonicalHandles.java | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 9993f78b4dd5..46f2d767ea6c 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -20,6 +20,7 @@ import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataValue; import org.dspace.content.service.ItemService; +import org.dspace.content.service.MetadataValueService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; @@ -67,6 +68,9 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident @Autowired(required = true) private ItemService itemService; + @Autowired() + private MetadataValueService metadataValueService; + /** * After all the properties are set check that the versioning is enabled * @@ -504,12 +508,17 @@ protected void modifyHandleMetadata(Context context, Item item, String handle) String handleref = handleService.getCanonicalForm(handle); List identifiers = itemService .getMetadata(item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", Item.ANY); - itemService.clearMetadata(context, item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", Item.ANY); for (MetadataValue identifier : identifiers) { if (this.supports(identifier.getValue())) { // ignore handles continue; } + + identifiers.remove(identifier); + metadataValueService.delete(context, identifier); + + context.uncacheEntity(identifier); + itemService.addMetadata(context, item, identifier.getMetadataField(), @@ -522,6 +531,7 @@ protected void modifyHandleMetadata(Context context, Item item, String handle) itemService.addMetadata(context, item, MetadataSchemaEnum.DC.getName(), 
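// Standalone sketch of the null-safe pattern adopted above (not a literal excerpt from this patch):
// a request attribute that may be missing is reduced to a primitive boolean without the
// auto-unboxing NullPointerException a direct (Boolean) cast of null would cause. The extra
// isInstance filter is an illustration only; the patch itself casts directly.
// requires: java.util.Optional, javax.servlet.http.HttpServletRequest
private static boolean booleanAttribute(HttpServletRequest request, String name) {
    return request != null && Optional.ofNullable(request.getAttribute(name))
            .filter(Boolean.class::isInstance)
            .map(Boolean.class::cast)
            .orElse(false);
}
// e.g. booleanAttribute(request, PASSWORD_AUTHENTICATED) mirrors isPasswordAuthenticatedInRequest
// minus the context check.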
"identifier", "uri", null, handleref); } + itemService.setMetadataModified(item); itemService.update(context, item); } } From 310abb9e6cf3d84ba5c294b149b5264160c5ae07 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 23 Nov 2023 11:16:08 +0100 Subject: [PATCH 601/686] [DSC-1393] Fixes license error message --- .../dspace/validation/LicenseValidator.java | 2 +- .../org/dspace/harvest/OAIHarvesterIT.java | 10 +-- .../rest/WorkspaceItemRestRepositoryIT.java | 65 ++++++++++++++----- 3 files changed, 56 insertions(+), 21 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/validation/LicenseValidator.java b/dspace-api/src/main/java/org/dspace/validation/LicenseValidator.java index ffc67c20311c..c13cb0e89598 100644 --- a/dspace-api/src/main/java/org/dspace/validation/LicenseValidator.java +++ b/dspace-api/src/main/java/org/dspace/validation/LicenseValidator.java @@ -36,7 +36,7 @@ */ public class LicenseValidator implements SubmissionStepValidator { - private static final String ERROR_VALIDATION_LICENSEREQUIRED = "error.validation.license.notgranted"; + public static final String ERROR_VALIDATION_LICENSEREQUIRED = "error.validation.license.required"; private String name; diff --git a/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java b/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java index b60f4b5e6939..e58788104aa7 100644 --- a/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java +++ b/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java @@ -74,6 +74,7 @@ import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.util.UUIDUtils; +import org.dspace.validation.LicenseValidator; import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.storedcomponents.PoolTask; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; @@ -1302,7 +1303,8 @@ public void testRunHarvestWithEmailSentIfItemValidationFails() throws Exception assertThat(errorDetails.getMessages(), hasSize(2)); assertThat(errorDetails.getMessages(), hasItem("error.validation.filerequired - [/sections/upload]")); assertThat(errorDetails.getMessages(), - hasItem("error.validation.license.notgranted - [/sections/license]")); + hasItem(LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + " - [/sections/license]") + ); verifyNoMoreInteractions(mockClient, mockEmailSender); @@ -1425,7 +1427,7 @@ public void testRunHarvestWithEmailSentIfItemAndRecordValidationFails() throws E List messages = errorDetails.getMessages(); assertThat(messages, hasSize(3)); assertThat(messages, hasItem("error.validation.filerequired - [/sections/upload]")); - assertThat(messages, hasItem("error.validation.license.notgranted - [/sections/license]")); + assertThat(messages, hasItem(LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + " - [/sections/license]")); assertThat(messages, hasItem("error.validation.required - [/sections/publication/dc.date.issued]")); errorDetails = errors.get("oai:test-harvest:Publications/123456789/1002"); @@ -1433,7 +1435,7 @@ public void testRunHarvestWithEmailSentIfItemAndRecordValidationFails() throws E messages = errorDetails.getMessages(); assertThat(messages, hasSize(3)); assertThat(messages, hasItem("error.validation.filerequired - [/sections/upload]")); - assertThat(messages, hasItem("error.validation.license.notgranted - [/sections/license]")); + assertThat(messages, hasItem(LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + " - [/sections/license]")); 
assertThat(errorDetails.getMessages(), hasItem(containsString("Element 'oai_cerif:Publishers' " + "cannot have character [children]"))); @@ -1442,7 +1444,7 @@ public void testRunHarvestWithEmailSentIfItemAndRecordValidationFails() throws E messages = errorDetails.getMessages(); assertThat(messages, hasSize(2)); assertThat(messages, hasItem("error.validation.filerequired - [/sections/upload]")); - assertThat(messages, hasItem("error.validation.license.notgranted - [/sections/license]")); + assertThat(messages, hasItem(LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + " - [/sections/license]")); verifyNoMoreInteractions(mockClient, mockEmailSender); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java index 5cfcbbe0de95..c6b4821a950f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java @@ -112,6 +112,7 @@ import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.supervision.SupervisionOrder; import org.dspace.util.UUIDUtils; +import org.dspace.validation.LicenseValidator; import org.dspace.versioning.ItemCorrectionProvider; import org.hamcrest.Matchers; import org.junit.Before; @@ -130,6 +131,8 @@ */ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegrationTest { + public static final String LICENSE_ERROR_MESSAGE_PATH = + "$.errors[?(@.message=='" + LicenseValidator.ERROR_VALIDATION_LICENSEREQUIRED + "')]"; @Autowired private CollectionService cs; @Autowired @@ -4597,8 +4600,18 @@ public void patchRejectLicenseTest() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect( + jsonPath( + + LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath( + "$.paths", + contains(hasJsonPath("$", is("/sections/license"))) + ) + ) + ) + ) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4608,8 +4621,8 @@ public void patchRejectLicenseTest() throws Exception { // verify that the patch changes have been persisted getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4625,8 +4638,12 @@ public void patchRejectLicenseTest() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath("$.paths", + contains(hasJsonPath("$", 
is("/sections/license")))) + ) + )) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4636,8 +4653,8 @@ public void patchRejectLicenseTest() throws Exception { // verify that the patch changes have been persisted getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem2.getID())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4653,8 +4670,13 @@ public void patchRejectLicenseTest() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath("$.paths", + contains(hasJsonPath("$", is("/sections/license"))) + ) + ) + )) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4664,8 +4686,13 @@ public void patchRejectLicenseTest() throws Exception { // verify that the patch changes have been persisted getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem3.getID())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath("$.paths", + contains(hasJsonPath("$", is("/sections/license"))) + ) + ) + )) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4681,8 +4708,14 @@ public void patchRejectLicenseTest() throws Exception { .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( + hasJsonPath( + "$.paths", + contains(hasJsonPath("$", is("/sections/license"))) + ) + ) + )) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) @@ -4692,8 +4725,8 @@ public void patchRejectLicenseTest() throws Exception { // verify that the patch changes have been persisted getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem4.getID())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.errors[?(@.message=='error.validation.license.notgranted')]", - contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) + .andExpect(jsonPath(LICENSE_ERROR_MESSAGE_PATH, + contains( hasJsonPath("$.paths", contains(hasJsonPath("$", is("/sections/license"))))))) .andExpect(jsonPath("$.sections.license.granted", is(false))) .andExpect(jsonPath("$.sections.license.acceptanceDate").isEmpty()) From 
9d3e24eb9b62d1846402a6030048d3e9ad584200 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 23 Nov 2023 11:20:38 +0100 Subject: [PATCH 602/686] [DSC-1361] Fixes VersioningWithRelationshipIT --- ...VersionedHandleIdentifierProviderWithCanonicalHandles.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 46f2d767ea6c..a6654f1a8a99 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -9,6 +9,7 @@ import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Date; import java.util.List; @@ -508,6 +509,7 @@ protected void modifyHandleMetadata(Context context, Item item, String handle) String handleref = handleService.getCanonicalForm(handle); List identifiers = itemService .getMetadata(item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", Item.ANY); + List toRemove = new ArrayList<>(); for (MetadataValue identifier : identifiers) { if (this.supports(identifier.getValue())) { // ignore handles @@ -515,6 +517,7 @@ protected void modifyHandleMetadata(Context context, Item item, String handle) } identifiers.remove(identifier); + toRemove.add(identifier); metadataValueService.delete(context, identifier); context.uncacheEntity(identifier); @@ -527,6 +530,7 @@ protected void modifyHandleMetadata(Context context, Item item, String handle) identifier.getAuthority(), identifier.getConfidence()); } + itemService.removeMetadataValues(context, item, toRemove); if (!StringUtils.isEmpty(handleref)) { itemService.addMetadata(context, item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", null, handleref); From f18125612f1222572456001ed88e319a8367b199 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 23 Nov 2023 12:34:37 +0100 Subject: [PATCH 603/686] [DSC-1361] Fixes VersioningWithRelationshipIT --- .../VersionedHandleIdentifierProviderWithCanonicalHandles.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index a6654f1a8a99..62e802573530 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -520,8 +520,6 @@ protected void modifyHandleMetadata(Context context, Item item, String handle) toRemove.add(identifier); metadataValueService.delete(context, identifier); - context.uncacheEntity(identifier); - itemService.addMetadata(context, item, identifier.getMetadataField(), From bfc66d0c36d18800d8a8f113a6f48b6693dd51d7 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 23 Nov 2023 12:51:05 +0100 Subject: [PATCH 604/686] [DSC-1361] Fixes VersioningWithRelationshipIT --- .../VersionedHandleIdentifierProviderWithCanonicalHandles.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java 
b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 62e802573530..d73dfa448db9 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -529,6 +529,9 @@ protected void modifyHandleMetadata(Context context, Item item, String handle) identifier.getConfidence()); } itemService.removeMetadataValues(context, item, toRemove); + + item = context.reloadEntity(item); + if (!StringUtils.isEmpty(handleref)) { itemService.addMetadata(context, item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", null, handleref); From 5b4a140442e97fd966498ecc14625982d4be68fe Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Fri, 24 Nov 2023 16:39:23 +0100 Subject: [PATCH 605/686] [DSC-1361] Update version constant --- dspace-api/src/main/java/org/dspace/core/CrisConstants.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/core/CrisConstants.java b/dspace-api/src/main/java/org/dspace/core/CrisConstants.java index 5bce199cfd76..1b4d1c9dc5c5 100644 --- a/dspace-api/src/main/java/org/dspace/core/CrisConstants.java +++ b/dspace-api/src/main/java/org/dspace/core/CrisConstants.java @@ -21,7 +21,7 @@ public class CrisConstants { * same number than the parent leading metadata */ public static final String PLACEHOLDER_PARENT_METADATA_VALUE = "#PLACEHOLDER_PARENT_METADATA_VALUE#"; - public static final String DSPACE_BASE_VERSION = "DSpace 7.5"; + public static final String DSPACE_BASE_VERSION = "DSpace 7.6.1"; public static final MetadataFieldName MD_ENTITY_TYPE = new MetadataFieldName("dspace", "entity", "type"); public static final MetadataFieldName MD_SUBMISSION_TYPE = new MetadataFieldName("cris", "submission", "definition"); From 9dc73b9f62fb323f7e67c616ecc3ddfb52392f00 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Fri, 24 Nov 2023 17:19:45 +0100 Subject: [PATCH 606/686] [DSC-1339] Remove detect-duplicate from edit items submission configurations --- dspace/config/item-submission.xml | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index b3e2f504c9b5..d2d159775ad2 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -228,7 +228,6 @@ - @@ -241,7 +240,6 @@ - @@ -254,7 +252,6 @@ - @@ -267,7 +264,6 @@ - @@ -288,7 +284,6 @@ - @@ -303,14 +298,12 @@ - - @@ -323,7 +316,6 @@ - @@ -336,7 +328,6 @@ - @@ -355,7 +346,6 @@ - @@ -368,7 +358,6 @@ - From fc3ed21cabfdf6e59d6eab69f0a62a3e6cb5ed73 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Tue, 7 Nov 2023 19:44:01 +0100 Subject: [PATCH 607/686] [DSC-1350] fix name permutations to skip too long names on best match plugin --- .../src/main/java/org/dspace/util/PersonNameUtil.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java index 0e88a0a9cdf5..2751db759e91 100644 --- a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java +++ b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java @@ -109,10 +109,13 @@ private static List getAllNamePermutations(String name) { List namePermutations = new ArrayList(); - PermutationIterator permutationIterator = new PermutationIterator(List.of(name.split(" 
"))); + List names = List.of(name.split(" ")); + if (names.size() < 5) { + PermutationIterator permutationIterator = new PermutationIterator(names); - while (permutationIterator.hasNext()) { - namePermutations.add(String.join(" ", permutationIterator.next())); + while (permutationIterator.hasNext()) { + namePermutations.add(String.join(" ", permutationIterator.next())); + } } return namePermutations; From 1fc3fb09bade7da4f0949d763818c3ce5869a47e Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Mon, 13 Nov 2023 11:02:12 +0100 Subject: [PATCH 608/686] [DSC-1350] - Add warning log --- .../SolrServiceBestMatchIndexingPlugin.java | 2 +- .../java/org/dspace/util/PersonNameUtil.java | 17 ++++++++++++----- .../org/dspace/util/PersonNameUtilTest.java | 18 +++++++++++------- 3 files changed, 24 insertions(+), 13 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java index 39130e9224d2..a1830a3931c7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java @@ -68,7 +68,7 @@ protected void addIndexValueForPersonItem(Item item, SolrInputDocument document) String lastName = getMetadataValue(item, LASTNAME_FIELD); List fullNames = getMetadataValues(item, FULLNAME_FIELDS); - getAllNameVariants(firstName, lastName, fullNames) + getAllNameVariants(firstName, lastName, fullNames, item.getID().toString()) .forEach(variant -> addIndexValue(document, variant)); } diff --git a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java index 2751db759e91..cea02c76990b 100644 --- a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java +++ b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java @@ -16,6 +16,7 @@ import org.apache.commons.collections4.iterators.PermutationIterator; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; /** * Utility class that handle person names. 
@@ -24,6 +25,7 @@ * */ public final class PersonNameUtil { + private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PersonNameUtil.class); private PersonNameUtil() { @@ -35,12 +37,14 @@ private PersonNameUtil() { * @param firstName the first name * @param lastName the last name * @param fullNames the full names + * @param uuid the uuid * @return all the variants of the given names */ - public static Set getAllNameVariants(String firstName, String lastName, List fullNames) { + public static Set getAllNameVariants(String firstName, String lastName, List fullNames, + String uuid) { Set variants = new HashSet(); variants.addAll(getNameVariants(firstName, lastName)); - variants.addAll(getNameVariants(fullNames)); + variants.addAll(getNameVariants(fullNames, uuid)); return variants; } @@ -95,17 +99,17 @@ private static List getNameVariants(String[] firstNames, String lastName return variants; } - private static List getNameVariants(List fullNames) { + private static List getNameVariants(List fullNames, String uuid) { return fullNames.stream() .filter(Objects::nonNull) .map(name -> removeComma(name)) .distinct() - .flatMap(name -> getAllNamePermutations(name).stream()) + .flatMap(name -> getAllNamePermutations(name, uuid).stream()) .distinct() .collect(Collectors.toList()); } - private static List getAllNamePermutations(String name) { + private static List getAllNamePermutations(String name, String uuid) { List namePermutations = new ArrayList(); @@ -116,6 +120,9 @@ private static List getAllNamePermutations(String name) { while (permutationIterator.hasNext()) { namePermutations.add(String.join(" ", permutationIterator.next())); } + } else { + log.warn(String.format("Cannot retrieve variants on the Person with UUID %s because the name is too long", + uuid)); } return namePermutations; diff --git a/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java b/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java index fe80bf143756..c0c5a0c02194 100644 --- a/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java +++ b/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java @@ -27,7 +27,8 @@ public class PersonNameUtilTest { @Test public void testWithAllNames() { - Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, Luca", "Luke Giammo")); + Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, Luca", + "Luke Giammo"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "Giamminonni L.", "L. 
Giamminonni", "Giamminonni L", "L Giamminonni", "Luke Giammo", "Giammo Luke")); @@ -37,7 +38,7 @@ public void testWithAllNames() { public void testWithFirstNameComposedByTwoNames() { Set variants = getAllNameVariants("Luca Paolo", "Giamminonni", - List.of("Giamminonni, Luca", "Luke Giammo")); + List.of("Giamminonni, Luca", "Luke Giammo"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca Paolo", "Luca Paolo Giamminonni", "Giamminonni Luca", "Luca Giamminonni", "Giamminonni Paolo", "Paolo Giamminonni", @@ -51,7 +52,7 @@ public void testWithFirstNameComposedByTwoNames() { public void testWithFirstNameComposedByThreeNames() { Set variants = getAllNameVariants("Luca Paolo Claudio", "Giamminonni", - List.of("Giamminonni, Luca", "Luke Giammo")); + List.of("Giamminonni, Luca", "Luke Giammo"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca Paolo Claudio", "Luca Paolo Claudio Giamminonni", "Giamminonni Luca Claudio", "Luca Claudio Giamminonni", "Giamminonni Paolo Claudio", @@ -69,7 +70,8 @@ public void testWithFirstNameComposedByThreeNames() { @Test public void testWithoutFirstAndLastName() { - Set variants = getAllNameVariants(null, null, List.of("Giamminonni, Luca Fabio", "Luke Giammo")); + Set variants = getAllNameVariants(null, null, List.of("Giamminonni, Luca Fabio", "Luke Giammo"), + "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca Fabio", "Fabio Luca Giamminonni", "Giamminonni Fabio Luca", "Luca Fabio Giamminonni", "Luca Giamminonni Fabio", @@ -80,12 +82,13 @@ public void testWithoutFirstAndLastName() { @Test public void testWithAlreadyTruncatedName() { - Set variants = getAllNameVariants("L.", "Giamminonni", List.of("Giamminonni, Luca")); + Set variants = getAllNameVariants("L.", "Giamminonni", List.of("Giamminonni, Luca"), + "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "Giamminonni L.", "L. Giamminonni", "Giamminonni L", "L Giamminonni")); - variants = getAllNameVariants("L. P.", "Giamminonni", List.of("Giamminonni, Luca")); + variants = getAllNameVariants("L. P.", "Giamminonni", List.of("Giamminonni, Luca"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "L. Giamminonni", "Giamminonni L.", "P. Giamminonni", "Giamminonni P.", "Giamminonni L. P.", "L. P. Giamminonni", @@ -97,7 +100,8 @@ public void testWithAlreadyTruncatedName() { @Test public void testWithAlreadyTruncatedNameOnFullName() { - Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, L.")); + Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, L."), + "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "Giamminonni L.", "L. 
Giamminonni", "Giamminonni L", "L Giamminonni")); From da318515288ef6c4032f9ad759f7f2cc2f28bef1 Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Tue, 12 Sep 2023 12:53:47 +0200 Subject: [PATCH 609/686] [DSC-1145] The reciprocal metadata does not update the solr document of related items --- .../ReciprocalItemAuthorityConsumer.java | 24 +++++ .../ReciprocalItemAuthorityConsumerIT.java | 91 ++++++++++++++++++- 2 files changed, 111 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java b/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java index a78430fb574b..faab946daa18 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java @@ -23,9 +23,12 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.discovery.IndexingService; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.event.Consumer; import org.dspace.event.Event; import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.utils.DSpace; /** @@ -42,6 +45,9 @@ public class ReciprocalItemAuthorityConsumer implements Consumer { private final transient Set processedHandles = new HashSet<>(); + private final IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName(IndexingService.class.getName(), IndexingService.class); + private final ItemService itemService; public ReciprocalItemAuthorityConsumer() { @@ -127,6 +133,24 @@ private void assureReciprocalLink(Context ctx, Item target, String mdString, Str itemService.addMetadata(ctx, target, mdSplit[0], mdSplit[1], mdSplit.length > 2 ? 
mdSplit[2] : null, null, name, sourceUuid, Choices.CF_ACCEPTED); + reindexItem(ctx, target); + } + + private void reindexItem(Context ctx, Item target) throws SQLException { + IndexableItem item = new IndexableItem(target); + item.setIndexedObject(ctx.reloadEntity(item.getIndexedObject())); + String uniqueIndexID = item.getUniqueIndexID(); + if (uniqueIndexID != null) { + try { + indexer.indexContent(ctx, item, true, false, false); + log.debug("Indexed " + + item.getTypeText() + + ", id=" + item.getID() + + ", unique_id=" + uniqueIndexID); + } catch (Exception e) { + log.error("Failed while indexing object: ", e); + } + } } @Override diff --git a/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java b/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java index aeba48df80ef..136a1be05239 100644 --- a/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java @@ -10,6 +10,10 @@ import java.util.List; import java.util.UUID; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; @@ -21,6 +25,9 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.discovery.MockSolrSearchCore; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.factory.DSpaceServicesFactory; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -29,19 +36,24 @@ public class ReciprocalItemAuthorityConsumerIT extends AbstractIntegrationTestWi private final ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private MockSolrSearchCore searchService; + @Override @Before public void setUp() throws Exception { super.setUp(); context.turnOffAuthorisationSystem(); + ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + searchService = serviceManager.getServiceByName(null, MockSolrSearchCore.class); + parentCommunity = CommunityBuilder.createCommunity(context) .withName("Parent Community") .build(); } @Test - public void testShouldCreatePublicationMetadataForProductItem() { + public void testShouldCreatePublicationMetadataForProductItem() throws Exception { String productTitle = "productTitle"; Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) .withEntityType("product") @@ -74,10 +86,22 @@ public void testShouldCreatePublicationMetadataForProductItem() { Assert.assertNotNull(metadataValues.get(0)); Assert.assertEquals(publicationItem.getID().toString(), metadataValues.get(0).getAuthority()); Assert.assertEquals(publicationItem.getName(), metadataValues.get(0).getValue()); + + SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List publicationTitles = (List) solrDoc.get("dc.relation.publication"); + Assert.assertEquals(1, publicationTitles.size()); + Assert.assertEquals(publicationItem.getName(), publicationTitles.get(0)); + + List publicationAuthorities = (List) 
solrDoc.get("dc.relation.publication_authority"); + Assert.assertEquals(1, publicationAuthorities.size()); + Assert.assertEquals(publicationItem.getID().toString(), publicationAuthorities.get(0)); } @Test - public void testShouldCreateProductMetadataForPublicationItem() { + public void testShouldCreateProductMetadataForPublicationItem() throws Exception { String publicationTitle = "publicationTitle"; Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) .withEntityType("publication") @@ -110,6 +134,18 @@ public void testShouldCreateProductMetadataForPublicationItem() { Assert.assertNotNull(metadataValues.get(0)); Assert.assertEquals(productItem.getID().toString(), metadataValues.get(0).getAuthority()); Assert.assertEquals(productItem.getName(), metadataValues.get(0).getValue()); + + SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List productTitles = (List) solrDoc.get("dc.relation.product"); + Assert.assertEquals(1, productTitles.size()); + Assert.assertEquals(productItem.getName(), productTitles.get(0)); + + List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); + Assert.assertEquals(1, productAuthorities.size()); + Assert.assertEquals(productItem.getID().toString(), productAuthorities.get(0)); } @Test @@ -143,6 +179,16 @@ public void testItemMentioningNotExistingAuthorityIsCreated() throws Exception { publicationItem, "dc.relation.product"); Assert.assertEquals(0, metadataValues.size()); + SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List productTitles = (List) solrDoc.get("dc.relation.product"); + Assert.assertNull(productTitles); + + List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); + Assert.assertNull(productAuthorities); + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); Assert.assertEquals(productItem.getID(), foundProductItem.getID()); } @@ -162,6 +208,16 @@ public void testItemMentioningInvalidAuthorityIsCreated() throws Exception { .withType("product") .build(); + SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List publicationTitles = (List) solrDoc.get("dc.relation.publication"); + Assert.assertNull(publicationTitles); + + List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); + Assert.assertNull(publicationAuthorities); + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); Assert.assertEquals(productItem.getID(), foundProductItem.getID()); } @@ -172,7 +228,7 @@ public void testItemWithoutAuthorityIsCreated() throws Exception { Collection publicatoinItemCollection = CollectionBuilder.createCollection(context, parentCommunity) .withEntityType("publication") .withName("test_collection").build(); - Item publicatoinItem = ItemBuilder.createItem(context, publicatoinItemCollection) + Item publicationItem = ItemBuilder.createItem(context, publicatoinItemCollection) .withPersonIdentifierFirstName("test_first_name") .withPersonIdentifierLastName("test_second_name") .withScopusAuthorIdentifier("test_author_identifier") @@ -193,9 +249,19 @@ public void 
testItemWithoutAuthorityIsCreated() throws Exception { .build(); List metadataValues = itemService.getMetadataByMetadataString( - publicatoinItem, "dc.relation.product"); + publicationItem, "dc.relation.product"); Assert.assertEquals(0, metadataValues.size()); + SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List productTitles = (List) solrDoc.get("dc.relation.product"); + Assert.assertNull(productTitles); + + List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); + Assert.assertNull(productAuthorities); + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); Assert.assertEquals(productItem.getID(), foundProductItem.getID()); } @@ -217,8 +283,25 @@ public void testItemWithoutPublicationMetadataIsCreated() throws Exception { productItem, "dc.relation.publication"); Assert.assertEquals(0, productItemMetadataValues.size()); + SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List publicationTitles = (List) solrDoc.get("dc.relation.publication"); + Assert.assertNull(publicationTitles); + + List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); + Assert.assertNull(publicationAuthorities); + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); Assert.assertEquals(productItem.getID(), foundProductItem.getID()); } + public SolrDocumentList getSolrDocumentList(Item item) throws Exception { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setQuery("search.resourceid:" + item.getID()); + QueryResponse queryResponse = searchService.getSolr().query(solrQuery); + return queryResponse.getResults(); + } + } \ No newline at end of file From 9b72b873c73bc318c1f67485ddb81f40a7dee6a5 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Mon, 27 Nov 2023 17:48:58 +0100 Subject: [PATCH 610/686] [DSC-950] Fix checkstyle --- .../dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java index 336836a2a59f..d536f547d0bd 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java @@ -18,7 +18,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.hasValue; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; From eafee97b23c18fcf3fb2f7e6b2506211e9ed12d8 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 28 Nov 2023 17:28:19 +0100 Subject: [PATCH 611/686] [DSC-1053] Fixes StatisticsRestRepositoryIT failures --- .../app/rest/StatisticsRestRepositoryIT.java | 2729 +++++++++-------- 1 file changed, 1475 insertions(+), 1254 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java index b0d740142c9d..8956cdc7d809 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java @@ -46,6 +46,7 @@ import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_TOTAL_DOWNLOADS; import static org.dspace.app.rest.utils.UsageReportUtils.TOTAL_VISITS_TOTAL_DOWNLOADS_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS; +import static org.dspace.util.FunctionalUtils.throwingConsumerWrapper; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.not; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; @@ -64,9 +65,12 @@ import java.util.Calendar; import java.util.Collections; import java.util.Date; +import java.util.LinkedList; import java.util.List; import java.util.Locale; +import java.util.Queue; import java.util.UUID; +import java.util.function.Consumer; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; @@ -100,6 +104,9 @@ import org.dspace.core.Constants; import org.dspace.eperson.EPerson; import org.dspace.services.ConfigurationService; +import org.dspace.services.EventService; +import org.dspace.services.model.Event; +import org.dspace.services.model.EventListener; import org.dspace.statistics.factory.StatisticsServiceFactory; import org.dspace.util.MultiFormatDateParser; import org.hamcrest.Matchers; @@ -117,10 +124,14 @@ */ public class StatisticsRestRepositoryIT extends AbstractControllerIntegrationTest { + protected final StatisticsEventListener statisticsEventListener = new StatisticsEventListener(); + @Autowired ConfigurationService configurationService; @Autowired protected AuthorizeService authorizeService; + @Autowired + protected EventService eventService; private Community communityNotVisited; private Community communityVisited; @@ -165,7 +176,9 @@ public void setUp() throws Exception { itemVisited = ItemBuilder.createItem(context, collectionNotVisited).build(); itemNotVisitedWithBitstreams = ItemBuilder.createItem(context, collectionNotVisited).build(); bitstreamNotVisited = BitstreamBuilder.createBitstream(context, - itemNotVisitedWithBitstreams, toInputStream("test", UTF_8)).withName("BitstreamNotVisitedName").build(); + itemNotVisitedWithBitstreams, + toInputStream("test", UTF_8)) + .withName("BitstreamNotVisitedName").build(); bitstreamVisited = BitstreamBuilder .createBitstream(context, itemNotVisitedWithBitstreams, toInputStream("test", UTF_8)) .withName("BitstreamVisitedName").build(); @@ -197,20 +210,22 @@ public void setUp() throws Exception { .build(); //bitstream for first publication of person bitstreampublication_first = BitstreamBuilder - .createBitstream(context, publicationVisited1, - toInputStream("test", UTF_8)) - .withName("bitstream1") + .createBitstream(context, publicationVisited1, + toInputStream("test", UTF_8)) + .withName("bitstream1") .build(); //bitstream for second publication of person bitstreampublication_second = BitstreamBuilder - .createBitstream(context, publicationVisited2, - toInputStream("test", UTF_8)) - .withName("bitstream2") + .createBitstream(context, publicationVisited2, + toInputStream("test", UTF_8)) + .withName("bitstream2") .build(); loggedInToken = getAuthToken(eperson.getEmail(), password); 
adminToken = getAuthToken(admin.getEmail(), password); + this.eventService.registerEventListener(this.statisticsEventListener); + context.restoreAuthSystemState(); } @@ -223,26 +238,26 @@ public void usagereports_withoutId_NotImplementedException() throws Exception { @Test public void usagereports_notProperUUIDAndReportId_Exception() throws Exception { getClient(adminToken).perform(get("/api/statistics/usagereports/notProperUUIDAndReportId")) - .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); + .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); } @Test public void usagereports_nonValidUUIDpart_Exception() throws Exception { getClient(adminToken).perform(get("/api/statistics/usagereports/notAnUUID" + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); + .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); } @Test public void usagereports_nonValidReportIDpart_Exception() throws Exception { getClient(adminToken).perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + - "_NotValidReport")) - .andExpect(status().is(HttpStatus.NOT_FOUND.value())); + "_NotValidReport")) + .andExpect(status().is(HttpStatus.NOT_FOUND.value())); } @Test public void usagereports_nonValidReportIDpart_Exception_By_Anonymous_Unauthorized_Test() throws Exception { getClient().perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + - "_NotValidReport")) + "_NotValidReport")) .andExpect(status().isUnauthorized()); } @@ -250,15 +265,15 @@ public void usagereports_nonValidReportIDpart_Exception_By_Anonymous_Unauthorize public void usagereports_nonValidReportIDpart_Exception_By_Anonymous_Test() throws Exception { configurationService.setProperty("usage-statistics.authorization.admin.usage", false); getClient().perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + - "_NotValidReport")) + "_NotValidReport")) .andExpect(status().isNotFound()); } @Test public void usagereports_NonExistentUUID_Exception() throws Exception { getClient(adminToken).perform( - get("/api/statistics/usagereports/" + UUID.randomUUID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().is(HttpStatus.NOT_FOUND.value())); + get("/api/statistics/usagereports/" + UUID.randomUUID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().is(HttpStatus.NOT_FOUND.value())); } @Test @@ -275,7 +290,7 @@ public void usagereport_onlyAdminReadRights() throws Exception { // We request a dso's TotalVisits usage stat report as admin getClient(adminToken).perform( get("/api/statistics/usagereports/" + - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) // ** THEN ** .andExpect(status().isOk()); } @@ -287,8 +302,8 @@ public void usagereport_onlyAdminReadRights_unvalidToken() throws Exception { // We request a dso's TotalVisits usage stat report with unvalid token getClient("unvalidToken").perform( get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isUnauthorized()); + // ** THEN ** + .andExpect(status().isUnauthorized()); } @Test @@ -310,19 +325,19 @@ public void usagereport_loggedInUserReadRights() throws Exception { // We request a dso's TotalVisits usage stat report as anon but dso has no read policy for anon getClient().perform( get("/api/statistics/usagereports/" + - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + 
itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) // ** THEN ** .andExpect(status().isUnauthorized()); // We request a dso's TotalVisits usage stat report as logged in eperson and has read policy for this user getClient(loggedInToken).perform( get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isForbidden()); + // ** THEN ** + .andExpect(status().isForbidden()); // We request a dso's TotalVisits usage stat report as another logged in eperson and has no read policy for // this user getClient(anotherLoggedInUserToken).perform( get("/api/statistics/usagereports/" + - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) // ** THEN ** .andExpect(status().isForbidden()); } @@ -345,19 +360,19 @@ public void usagereport_loggedInUserReadRights_and_usage_statistics_admin_is_fal String anotherLoggedInUserToken = getAuthToken(eperson1.getEmail(), password); // We request a dso's TotalVisits usage stat report as anon but dso has no read policy for anon getClient().perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + - TOTAL_VISITS_REPORT_ID)) + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isUnauthorized()); // We request a dso's TotalVisits usage stat report as logged in eperson and has read policy for this user getClient(loggedInToken).perform(get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + - "_" + TOTAL_VISITS_REPORT_ID)) + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isOk()); // We request a dso's TotalVisits usage stat report as another logged // in eperson and has no read policy for this user getClient(anotherLoggedInUserToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().isForbidden()); + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isForbidden()); } @Test @@ -368,27 +383,30 @@ public void totalVisitsReport_Community_Visited() throws Exception { viewEventRest.setTargetType("community"); viewEventRest.setTargetId(communityVisited.getID()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that community's TotalVisits stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(communityVisited, 1) + ) + ) + ))); + })); + ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request that community's TotalVisits stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(communityVisited, 
1) - ) - ) - ))); } @Test @@ -397,18 +415,18 @@ public void totalVisitsReport_Community_NotVisited() throws Exception { // Community is never visited // And request that community's TotalVisits stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + communityNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - communityNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(communityNotVisited, 0) - ) - ) - ))); + get("/api/statistics/usagereports/" + communityNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + communityNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(communityNotVisited, 0) + ) + ) + ))); } @Test @@ -426,25 +444,29 @@ public void totalVisitsReport_Collection_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that collection's TotalVisits stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(collectionVisited, 2) + ) + ) + ))); + })); + getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request that collection's TotalVisits stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(collectionVisited, 2) - ) - ) - ))); } @Test @@ -453,18 +475,18 @@ public void totalVisitsReport_Collection_NotVisited() throws Exception { // Collection is never visited // And request that collection's TotalVisits stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(collectionNotVisited, 0) - ) - ) - ))); + get("/api/statistics/usagereports/" + collectionNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(collectionNotVisited, 0) + ) + ) + ))); } @Test @@ -475,27 +497,32 @@ public void totalVisitsReport_Item_Visited() throws Exception { viewEventRest.setTargetType("item"); 
viewEventRest.setTargetId(itemVisited.getID()); + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that collection's TotalVisits stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(itemVisited, 1) + ) + ) + ))); + })); + ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request that collection's TotalVisits stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(itemVisited, 1) - ) - ) - ))); } @Test @@ -509,8 +536,8 @@ public void totalVisitsReport_Item_NotVisited() throws Exception { // And request that item's TotalVisits stat report getClient(adminToken).perform( get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) + // ** THEN ** + .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.is( UsageReportMatcher.matchUsageReport( itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID, @@ -521,11 +548,11 @@ public void totalVisitsReport_Item_NotVisited() throws Exception { // only admin access visits report getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().isForbidden()); + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isForbidden()); getClient().perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isUnauthorized()); // make statistics visible to all @@ -533,7 +560,7 @@ public void totalVisitsReport_Item_NotVisited() throws Exception { getClient(loggedInToken).perform( get("/api/statistics/usagereports/" - + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.is( UsageReportMatcher.matchUsageReport( @@ -543,9 +570,9 @@ public void totalVisitsReport_Item_NotVisited() throws Exception { ) ))); - getClient().perform( + getClient().perform( get("/api/statistics/usagereports/" - + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isOk()) .andExpect(jsonPath("$", Matchers.is( UsageReportMatcher.matchUsageReport( @@ -571,31 +598,31 @@ public void totalVisitsReport_Bitstream_Visited() throws Exception { .contentType(contentType)) 
.andExpect(status().isCreated()); + Thread.sleep(1000); List expectedPoints = List.of( getExpectedDsoViews(bitstreamVisited, 1) ); // And request that bitstream's TotalVisits stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - expectedPoints - ) - ))); - + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + expectedPoints + ) + ))); // only admin access visits report getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().isForbidden()); + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isForbidden()); getClient().perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().isUnauthorized()); + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isUnauthorized()); // make statistics visible to all configurationService.setProperty("usage-statistics.authorization.admin.usage", false); @@ -635,24 +662,24 @@ public void totalVisitsReport_Bitstream_NotVisited() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); // And request that bitstream's TotalVisits stat report getClient(authToken).perform( - get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - expectedPoints - ) - ))); + get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + expectedPoints + ) + ))); String tokenEPerson = getAuthToken(eperson.getEmail(), password); getClient(tokenEPerson).perform( - get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) - .andExpect(status().isForbidden()); + get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + .andExpect(status().isForbidden()); getClient().perform( - get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) + get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isUnauthorized()); // make statistics visible to all @@ -669,7 +696,7 @@ public void totalVisitsReport_Bitstream_NotVisited() throws Exception { ) ))); - getClient().perform( + getClient().perform( get("/api/statistics/usagereports/" + bitstreamNotVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID)) .andExpect(status().isOk()) .andExpect(jsonPath("$", 
Matchers.is( @@ -696,54 +723,71 @@ public void totalVisitsPerMonthReport_Item_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); - List expectedPoints = getLastMonthVisitPoints(1); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedPoints = getLastMonthVisitPoints(1); - // And request that item's TotalVisitsPerMonth stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - expectedPoints - ) - ))); - - // only admin has access - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - .andExpect(status().isForbidden()); + // And request that item's TotalVisitsPerMonth stat report + getClient(adminToken).perform( + get( + "/api/statistics/usagereports/" + itemVisited.getID() + + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID + ) + ) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + expectedPoints + ) + ))); - getClient().perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - .andExpect(status().isUnauthorized()); + // only admin has access + getClient(loggedInToken).perform( + get( + "/api/statistics/usagereports/" + itemVisited.getID() + + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID + ) + ) + .andExpect(status().isForbidden()); - // make statistics visible to all - configurationService.setProperty("usage-statistics.authorization.admin.usage", false); + getClient().perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) + .andExpect(status().isUnauthorized()); - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - expectedPoints - ) - ))); + // make statistics visible to all + configurationService.setProperty("usage-statistics.authorization.admin.usage", false); - getClient().perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - expectedPoints + getClient(loggedInToken).perform( + get( + "/api/statistics/usagereports/" + itemVisited.getID() + + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID + ) ) - ))); + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + expectedPoints + ) + ))); + + getClient().perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) + .andExpect(status().isOk()) + 
.andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + expectedPoints + ) + ))); + })); } @Test @@ -752,17 +796,17 @@ public void totalVisitsPerMonthReport_Item_NotVisited() throws Exception { // Item is not visited // And request that item's TotalVisitsPerMonth stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(0) - ) - ))); + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + + TOTAL_VISITS_PER_MONTH_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(0) + ) + ))); } @Test @@ -780,23 +824,29 @@ public void totalVisitsPerMonthReport_Collection_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that collection's TotalVisitsPerMonth stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + + TOTAL_VISITS_PER_MONTH_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(2) + ) + ))); + })); + getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request that collection's TotalVisitsPerMonth stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(2) - ) - ))); } @Test @@ -807,63 +857,65 @@ public void TotalDownloadsReport_Bitstream() throws Exception { viewEventRest.setTargetType("bitstream"); viewEventRest.setTargetId(bitstreamVisited.getID()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedPoints = List.of( + getExpectedDsoViews(bitstreamVisited, 1) + ); + + // And request that bitstreams's TotalDownloads stat report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + expectedPoints + ) + ))); + + // only admin has access to downloads report + getClient(loggedInToken).perform( + 
get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isForbidden()); + + getClient().perform( + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isUnauthorized()); + + // make statistics visible to all + configurationService.setProperty("usage-statistics.authorization.admin.usage", false); + + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + expectedPoints + ) + ))); + + getClient().perform( + get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + expectedPoints + ) + ))); + })); ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - List expectedPoints = List.of( - getExpectedDsoViews(bitstreamVisited, 1) - ); - - // And request that bitstreams's TotalDownloads stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - expectedPoints - ) - ))); - - // only admin has access to downloads report - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isForbidden()); - - getClient().perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isUnauthorized()); - - // make statistics visible to all - configurationService.setProperty("usage-statistics.authorization.admin.usage", false); - - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - expectedPoints - ) - ))); - - getClient().perform( - get("/api/statistics/usagereports/" + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - expectedPoints - ) - ))); } @Test @@ -874,34 +926,41 @@ public void TotalDownloadsReport_Item() throws Exception { viewEventRest.setTargetType("bitstream"); viewEventRest.setTargetId(bitstreamVisited.getID()); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPoint = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint.addValue("views", 1); + 
expectedPoint.setId(bitstreamVisited.getID().toString()); + expectedPoint.setLabel("BitstreamVisitedName"); + expectedPoint.setType("bitstream"); + + // And request that item's TotalDownloads stat report + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/" + + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID + ) + ) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + List.of( + getExpectedDsoViews(bitstreamVisited, 1) + ) + ) + ))); + })); + ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPoint = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint.addValue("views", 1); - expectedPoint.setId(bitstreamVisited.getID().toString()); - expectedPoint.setLabel("BitstreamVisitedName"); - expectedPoint.setType("bitstream"); - - // And request that item's TotalDownloads stat report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + - TOTAL_DOWNLOADS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - List.of( - getExpectedDsoViews(bitstreamVisited, 1) - ) - ) - ))); } @Test @@ -910,24 +969,17 @@ public void TotalDownloadsReport_Item_NotVisited() throws Exception { // You don't visit an item's bitstreams // And request that item's TotalDownloads stat report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + - TOTAL_DOWNLOADS_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - List.of() - ) - ))); - } - - @Test - public void TotalDownloadsReport_SupportedDSO_Collection() throws Exception { - getClient(adminToken) - .perform(get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) - .andExpect(status().isOk()); + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + + TOTAL_DOWNLOADS_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemNotVisitedWithBitstreams.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + List.of() + ) + ))); } /** @@ -947,58 +999,67 @@ public void topCountriesReport_Collection_Visited() throws Exception { .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - List expectedPoints = List.of( - getExpectedCountryViews(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale()), - 1)); - - // And request that collection's TopCountries report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - 
UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - expectedPoints - ) - ))); - - // only admin has access to countries report - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - .andExpect(status().isForbidden()); - - getClient().perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - .andExpect(status().isUnauthorized()); - - // make statistics visible to all - configurationService.setProperty("usage-statistics.authorization.admin.usage", false); - - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - expectedPoints + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedPoints = List.of( + getExpectedCountryViews(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale()), + 1)); + + // And request that collection's TopCountries report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + expectedPoints ) - ))); + ))); + + // only admin has access to countries report + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + .andExpect(status().isForbidden()); + + getClient().perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + .andExpect(status().isUnauthorized()); + + // make statistics visible to all + configurationService.setProperty("usage-statistics.authorization.admin.usage", false); + + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + expectedPoints + ) + ))); + + getClient().perform( + get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + expectedPoints + ) + ))); + })); + } - getClient().perform( - get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - expectedPoints - ) - ))); + @Test + public void TotalDownloadsReport_SupportedDSO_Collection() throws Exception { + getClient(adminToken) + .perform(get("/api/statistics/usagereports/" + collectionVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID)) + .andExpect(status().isOk()); } /** @@ -1023,25 
+1084,26 @@ public void topCountriesReport_Community_Visited() throws Exception { .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - + Thread.sleep(1000); UsageReportPointCountryRest expectedPoint = new UsageReportPointCountryRest(); expectedPoint.addValue("views", 2); - expectedPoint.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); + expectedPoint.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); // And request that collection's TopCountries report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 2) - ) - ) - ))); + get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 2) + ) + ) + ))); } /** @@ -1053,16 +1115,17 @@ public void topCountriesReport_Item_NotVisited() throws Exception { // Item is not visited // And request that item's TopCountries report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemNotVisitedWithBitstreams.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of() - ) - ))); + get("/api/statistics/usagereports/" + itemNotVisitedWithBitstreams.getID() + + "_" + TOP_COUNTRIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemNotVisitedWithBitstreams.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of() + ) + ))); } /** @@ -1076,63 +1139,66 @@ public void topCitiesReport_Item_Visited() throws Exception { viewEventRest.setTargetType("item"); viewEventRest.setTargetId(itemVisited.getID()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedPoints = List.of( + getExpectedCityViews("New York", 1) + ); + + // And request that item's TopCities report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + expectedPoints + ) + ))); + + // only admin has access to cities report + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + .andExpect(status().isForbidden()); + + getClient().perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + .andExpect(status().isUnauthorized()); + + // make statistics visible to all + 
configurationService.setProperty("usage-statistics.authorization.admin.usage", false); + + getClient(loggedInToken).perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + expectedPoints + ) + ))); + + getClient().perform( + get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + expectedPoints + ) + ))); + })); + ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - List expectedPoints = List.of( - getExpectedCityViews("New York", 1) - ); - - // And request that item's TopCities report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - expectedPoints - ) - ))); - - // only admin has access to cities report - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - .andExpect(status().isForbidden()); - - getClient().perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - .andExpect(status().isUnauthorized()); - - // make statistics visible to all - configurationService.setProperty("usage-statistics.authorization.admin.usage", false); - - getClient(loggedInToken).perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - expectedPoints - ) - ))); - - getClient().perform( - get("/api/statistics/usagereports/" + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - expectedPoints - ) - ))); } /** @@ -1158,25 +1224,30 @@ public void topCitiesReport_Community_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that community's TopCities report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 3) + ) + ) + ))); + })); + getClient(loggedInToken).perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - // And request 
that community's TopCities report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 3) - ) - ) - ))); } /** @@ -1188,22 +1259,22 @@ public void topCitiesReport_Collection_NotVisited() throws Exception { // Collection is not visited // And request that collection's TopCountries report getClient(adminToken).perform( - get("/api/statistics/usagereports/" + collectionNotVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - collectionNotVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of() - ) - ))); - } - + get("/api/statistics/usagereports/" + collectionNotVisited.getID() + "_" + TOP_CITIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + collectionNotVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of() + ) + ))); + } + @Test public void usagereportsSearch_notProperURI_Exception() throws Exception { getClient(adminToken).perform(get("/api/statistics/usagereports/search/object?uri=BadUri")) - .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); + .andExpect(status().is(HttpStatus.BAD_REQUEST.value())); } @Test @@ -1215,9 +1286,10 @@ public void usagereportsSearch_noURI_Exception() throws Exception { @Test public void usagereportsSearch_NonExistentUUID_Exception() throws Exception { getClient(adminToken).perform( - get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api" + + "/core" + "/items/" + UUID.randomUUID())) - .andExpect(status().is(HttpStatus.NOT_FOUND.value())); + .andExpect(status().is(HttpStatus.NOT_FOUND.value())); } @Test @@ -1245,7 +1317,7 @@ public void usagereportSearch_onlyAdminReadRights_unvalidToken() throws Exceptio // We request a dso's TotalVisits usage stat report with unvalid token getClient("unvalidToken") .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + itemNotVisitedWithBitstreams.getID())) + "/items/" + itemNotVisitedWithBitstreams.getID())) // ** THEN ** .andExpect(status().isOk()) .andExpect(jsonPath("$", hasNoJsonPath("$.points"))); @@ -1277,7 +1349,7 @@ public void usagereportSearch_loggedInUserReadRights() throws Exception { // We request a dso's TotalVisits usage stat report as logged in eperson and has read policy for this user getClient(loggedInToken) .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + itemNotVisitedWithBitstreams.getID())) + "/items/" + itemNotVisitedWithBitstreams.getID())) // ** THEN ** .andExpect(status().isOk()) .andExpect(jsonPath("$", hasNoJsonPath("$.points"))); @@ -1343,8 +1415,8 @@ public void usageReportsSearch_Site_mainReports() throws Exception { viewEventRest3.setTargetId(item3.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest3)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest3)) + 
.contentType(contentType)) .andExpect(status().isCreated()); ViewEventRest viewEventRest4 = new ViewEventRest(); @@ -1352,8 +1424,8 @@ public void usageReportsSearch_Site_mainReports() throws Exception { viewEventRest4.setTargetId(item4.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest4)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest4)) + .contentType(contentType)) .andExpect(status().isCreated()); UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); @@ -1380,7 +1452,8 @@ public void usageReportsSearch_Site_mainReports() throws Exception { expectedPoint4.setLabel("My item 4"); expectedPoint4.setId(item4.getID().toString()); - List points = List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); + List points = + List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); pointCity.addValue("views", 5); @@ -1392,7 +1465,8 @@ public void usageReportsSearch_Site_mainReports() throws Exception { UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); pointCountry.addValue("views", 5); - pointCountry.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); + pointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); UsageReportPointCategoryRest publicationCategory = new UsageReportPointCategoryRest(); publicationCategory.addValue("views", 1); @@ -1435,26 +1509,35 @@ public void usageReportsSearch_Site_mainReports() throws Exception { eventCategory.setId("event"); List categories = List.of(publicationCategory, patentCategory, fundingCategory, - projectCategory, productCategory, journalCategory, personCategory, orgUnitCategory, + projectCategory, productCategory, journalCategory, + personCategory, orgUnitCategory, equipmentCategory, eventCategory); - - // And request the sites global usage report (show top most popular items) - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object") - .param("category", "site-mainReports") - .param("uri", "http://localhost:8080/server/api/core/sites/" + site.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - matchUsageReport(site.getID() + "_" + TOTAL_VISITS_REPORT_ID, TOP_ITEMS_REPORT_ID, points), - matchUsageReport(site.getID() + "_" + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, List.of(pointCity)), - matchUsageReport(site.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), - matchUsageReport(site.getID() + "_" + TOP_CONTINENTS_REPORT_ID, TOP_CONTINENTS_REPORT_ID, - List.of(pointContinent)), - matchUsageReport(site.getID() + "_" + TOP_CATEGORIES_REPORT_ID, TOP_CATEGORIES_REPORT_ID, categories), - matchUsageReport(site.getID() + "_" + TOP_COUNTRIES_REPORT_ID, TOP_COUNTRIES_REPORT_ID, - List.of(pointCountry))))); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request the sites global usage report (show top most popular items) + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object") + .param("category", "site-mainReports") + .param("uri", "http://localhost:8080/server/api/core/sites/" + site.getID())) + 
.andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + matchUsageReport(site.getID() + "_" + TOTAL_VISITS_REPORT_ID, TOP_ITEMS_REPORT_ID + , points), + matchUsageReport(site.getID() + "_" + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, + List.of(pointCity)), + matchUsageReport(site.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), + matchUsageReport(site.getID() + "_" + TOP_CONTINENTS_REPORT_ID, + TOP_CONTINENTS_REPORT_ID, + List.of(pointContinent)), + matchUsageReport(site.getID() + "_" + TOP_CATEGORIES_REPORT_ID, + TOP_CATEGORIES_REPORT_ID, + categories), + matchUsageReport(site.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of(pointCountry))))); + })); } @Test @@ -1481,32 +1564,7 @@ public void usageReportsSearch_Site_downloadReports() throws Exception { Bitstream bitstream3 = createBitstream(item2, "Bitstream 3"); Bitstream bitstream4 = createBitstream(item3, "Bitstream 4"); - getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) - .andExpect(status().isOk()); - context.restoreAuthSystemState(); - UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); expectedPoint1.addValue("views", 3); expectedPoint1.setType("item"); @@ -1537,7 +1595,34 @@ public void usageReportsSearch_Site_downloadReports() throws Exception { UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); pointCountry.addValue("views", 8); - pointCountry.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); + pointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID() + "/content")) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) + .andExpect(status().isOk()); + + 
getClient().perform(get("/api/core/bitstreams/" + bitstream4.getID() + "/content")) + .andExpect(status().isOk()); + + Thread.sleep(1000); getClient(adminToken) .perform(get("/api/statistics/usagereports/search/object") @@ -1555,6 +1640,7 @@ TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(8)), TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), matchUsageReport(site.getID() + "_" + TOP_DOWNLOAD_COUNTRIES_REPORT_ID, TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + } private Bitstream createBitstream(Item item, String name) throws Exception { @@ -1571,63 +1657,68 @@ public void usageReportsSearch_Community_Visited() throws Exception { viewEventRest.setTargetType("community"); viewEventRest.setTargetId(communityVisited.getID()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisits.addValue("views", 1); + expectedPointTotalVisits.setType("community"); + expectedPointTotalVisits.setId(communityVisited.getID().toString()); + + UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); + expectedPointCity.addValue("views", 1); + expectedPointCity.setId("New York"); + + UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); + expectedPointCountry.addValue("views", 1); + expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + // And request the community usage reports + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object?category=community-mainReports" + + "&uri=http://localhost:8080/server/api/core" + + "/communities/" + communityVisited.getID())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(communityVisited, 1) + ) + ), + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1) + ), + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 1) + ) + ) + ))); + })); + + ObjectMapper mapper = new ObjectMapper(); getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisits.addValue("views", 1); - expectedPointTotalVisits.setType("community"); - expectedPointTotalVisits.setId(communityVisited.getID().toString()); - - UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); - expectedPointCity.addValue("views", 1); - expectedPointCity.setId("New York"); - - UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); - expectedPointCountry.addValue("views", 1); - 
expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale())); - - // And request the community usage reports - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?category=community-mainReports" + - "&uri=http://localhost:8080/server/api/core" + - "/communities/" + communityVisited.getID())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(communityVisited, 1) - ) - ), - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1) - ), - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 1) - ) - ) - ))); } @Test @@ -1637,8 +1728,8 @@ public void usageReportsSearch_Collection_NotVisited() throws Exception { // And request the collection's usage reports getClient(adminToken) .perform(get("/api/statistics/usagereports/search/object?category=collection-mainReports" + - "&uri=http://localhost:8080/server/api/core" + - "/collections/" + collectionNotVisited.getID())) + "&uri=http://localhost:8080/server/api/core" + + "/collections/" + collectionNotVisited.getID())) // ** THEN ** .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) @@ -1682,73 +1773,80 @@ public void usageReportsSearch_Item_Visited_FileNotVisited() throws Exception { .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisits.addValue("views", 1); - expectedPointTotalVisits.setType("item"); - expectedPointTotalVisits.setId(itemVisited.getID().toString()); - - UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); - expectedPointCity.addValue("views", 1); - expectedPointCity.setId("New York"); - - UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); - expectedPointCountry.addValue("views", 1); - expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale())); - - //views and downloads - List totalDownloadsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit1.addValue("views", 1); - expectedPointTotalVisitsBit1.setType("item"); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit2.addValue("views", 0); - expectedPointTotalVisitsBit2.setType("bitstream"); - totalDownloadsPoints.add(expectedPointTotalVisitsBit1); - totalDownloadsPoints.add(expectedPointTotalVisitsBit2); - - - // And request the community usage reports - getClient(adminToken) - 
.perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + itemVisited.getID())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(itemVisited, 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - List.of() - ) - ))); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisits.addValue("views", 1); + expectedPointTotalVisits.setType("item"); + expectedPointTotalVisits.setId(itemVisited.getID().toString()); + + UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); + expectedPointCity.addValue("views", 1); + expectedPointCity.setId("New York"); + + UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); + expectedPointCountry.addValue("views", 1); + expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + //views and downloads + List totalDownloadsPoints = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit1.addValue("views", 1); + expectedPointTotalVisitsBit1.setType("item"); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit2.addValue("views", 0); + expectedPointTotalVisitsBit2.setType("bitstream"); + totalDownloadsPoints.add(expectedPointTotalVisitsBit1); + totalDownloadsPoints.add(expectedPointTotalVisitsBit2); + + + // And request the community usage reports + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + itemVisited.getID())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(itemVisited, 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 1) + ) + ), + 
UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + List.of() + ) + ))); + })); } @Test @@ -1756,14 +1854,14 @@ public void usageReportsSearch_ItemVisited_FilesVisited() throws Exception { context.turnOffAuthorisationSystem(); Bitstream bitstream1 = BitstreamBuilder.createBitstream(context, itemVisited, - toInputStream("test", UTF_8)) - .withName("bitstream1") - .build(); + toInputStream("test", UTF_8)) + .withName("bitstream1") + .build(); Bitstream bitstream2 = BitstreamBuilder.createBitstream(context, itemVisited, - toInputStream("test", UTF_8)) - .withName("bitstream2") - .build(); + toInputStream("test", UTF_8)) + .withName("bitstream2") + .build(); context.restoreAuthSystemState(); // ** WHEN ** @@ -1795,98 +1893,112 @@ public void usageReportsSearch_ItemVisited_FilesVisited() throws Exception { .content(mapper.writeValueAsBytes(viewEventRestBit2)) .contentType(contentType)) .andExpect(status().isCreated()); + + Thread.sleep(3000); + + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisits.addValue("views", 1); + expectedPointTotalVisits.setType("item"); + expectedPointTotalVisits.setId(itemVisited.getID().toString()); + + UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); + expectedPointCity.addValue("views", 1); + expectedPointCity.setId("New York"); + + UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); + expectedPointCountry.addValue("views", 1); + expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + List totalDownloadsPoints = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit1.addValue("views", 1); + expectedPointTotalVisitsBit1.setLabel("bitstream1"); + expectedPointTotalVisitsBit1.setId(bitstream1.getID().toString()); + expectedPointTotalVisitsBit1.setType("bitstream"); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit2.addValue("views", 2); + expectedPointTotalVisitsBit2.setLabel("bitstream2"); + expectedPointTotalVisitsBit2.setId(bitstream2.getID().toString()); + expectedPointTotalVisitsBit2.setType("bitstream"); + totalDownloadsPoints.add(expectedPointTotalVisitsBit1); + totalDownloadsPoints.add(expectedPointTotalVisitsBit2); + + + // first point for views + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsItem = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsItem.addValue("views", 1); + expectedPointTotalVisitsItem.setType("item"); + + //second point for total downlods + UsageReportPointDsoTotalVisitsRest expectedPointTotalDownloads = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalDownloads.addValue("views", 3); + expectedPointTotalDownloads.setType("bitstream"); + + List usageReportPointRestsVisitsAndDownloads = new ArrayList<>(); + usageReportPointRestsVisitsAndDownloads.add(expectedPointTotalVisitsItem); + 
usageReportPointRestsVisitsAndDownloads.add(expectedPointTotalDownloads); + + + // And request the community usage reports + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + itemVisited.getID())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of( + getExpectedDsoViews(itemVisited, 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + itemVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + List.of( + getExpectedDsoViews(bitstream1, 1), + getExpectedDsoViews(bitstream2, 2) + ) + ) + ))); + })); + getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRestBit2)) .contentType(contentType)) - .andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisits.addValue("views", 1); - expectedPointTotalVisits.setType("item"); - expectedPointTotalVisits.setId(itemVisited.getID().toString()); - - UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); - expectedPointCity.addValue("views", 1); - expectedPointCity.setId("New York"); - - UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); - expectedPointCountry.addValue("views", 1); - expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale())); - - List totalDownloadsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit1.addValue("views", 1); - expectedPointTotalVisitsBit1.setLabel("bitstream1"); - expectedPointTotalVisitsBit1.setId(bitstream1.getID().toString()); - expectedPointTotalVisitsBit1.setType("bitstream"); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit2.addValue("views", 2); - expectedPointTotalVisitsBit2.setLabel("bitstream2"); - expectedPointTotalVisitsBit2.setId(bitstream2.getID().toString()); - expectedPointTotalVisitsBit2.setType("bitstream"); - totalDownloadsPoints.add(expectedPointTotalVisitsBit1); - totalDownloadsPoints.add(expectedPointTotalVisitsBit2); - - - // first point for views - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsItem = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsItem.addValue("views", 1); - expectedPointTotalVisitsItem.setType("item"); - - //second point for total downlods - UsageReportPointDsoTotalVisitsRest expectedPointTotalDownloads = new UsageReportPointDsoTotalVisitsRest(); - 
expectedPointTotalDownloads.addValue("views", 3); - expectedPointTotalDownloads.setType("bitstream"); - - List usageReportPointRestsVisitsAndDownloads = new ArrayList<>(); - usageReportPointRestsVisitsAndDownloads.add(expectedPointTotalVisitsItem); - usageReportPointRestsVisitsAndDownloads.add(expectedPointTotalDownloads); - - - // And request the community usage reports - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + itemVisited.getID())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - List.of( - getExpectedDsoViews(itemVisited, 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - itemVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - List.of( - getExpectedDsoViews(bitstream1, 1), - getExpectedDsoViews(bitstream2, 2) - ) - ) - ))); + .andExpect(status().isCreated()); } @Test @@ -1897,55 +2009,61 @@ public void usageReportsSearch_Bitstream_Visited() throws Exception { viewEventRest.setTargetType("bitstream"); viewEventRest.setTargetId(bitstreamVisited.getID()); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + List expectedTotalVisits = List.of( + getExpectedDsoViews(bitstreamVisited, 1) + ); + + // And request the community usage reports + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + bitstreamVisited.getID())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + expectedTotalVisits + ), + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1) + ), + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOP_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, + List.of( + getExpectedCityViews("New York", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 1) + ) + ), + UsageReportMatcher.matchUsageReport( + bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, + TOTAL_DOWNLOADS_REPORT_ID, + expectedTotalVisits + ) + ))); + })); + ObjectMapper mapper = new ObjectMapper(); getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) - 
.andExpect(status().isCreated()); - - List expectedTotalVisits = List.of( - getExpectedDsoViews(bitstreamVisited, 1) - ); - - // And request the community usage reports - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + bitstreamVisited.getID())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - expectedTotalVisits - ), - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1) - ), - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOP_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, - List.of( - getExpectedCityViews("New York", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 1) - ) - ), - UsageReportMatcher.matchUsageReport( - bitstreamVisited.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID, - TOTAL_DOWNLOADS_REPORT_ID, - expectedTotalVisits - ) - ))); + .andExpect(status().isCreated()); } // This test search for statistics before the moment in which item is visited @@ -1965,83 +2083,93 @@ public void usageReportsSearch_ItemNotVisited_AtTime() throws Exception { .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - //create expected raport points - List points = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint1.addValue("views", 0); - expectedPoint1.setType("item"); - points.add(expectedPoint1); - - - UsageReportPointCategoryRest publicationCategory = new UsageReportPointCategoryRest(); - publicationCategory.addValue("views", 0); - publicationCategory.setId("publication"); - - UsageReportPointCategoryRest patentCategory = new UsageReportPointCategoryRest(); - patentCategory.addValue("views", 0); - patentCategory.setId("patent"); - - UsageReportPointCategoryRest fundingCategory = new UsageReportPointCategoryRest(); - fundingCategory.addValue("views", 0); - fundingCategory.setId("funding"); - - UsageReportPointCategoryRest projectCategory = new UsageReportPointCategoryRest(); - projectCategory.addValue("views", 0); - projectCategory.setId("project"); - - UsageReportPointCategoryRest productCategory = new UsageReportPointCategoryRest(); - productCategory.addValue("views", 0); - productCategory.setId("product"); - - UsageReportPointCategoryRest journalCategory = new UsageReportPointCategoryRest(); - journalCategory.addValue("views", 0); - journalCategory.setId("journal"); - - UsageReportPointCategoryRest personCategory = new UsageReportPointCategoryRest(); - personCategory.addValue("views", 0); - personCategory.setId("person"); - - UsageReportPointCategoryRest orgUnitCategory = new UsageReportPointCategoryRest(); - orgUnitCategory.addValue("views", 0); - orgUnitCategory.setId("orgunit"); - - UsageReportPointCategoryRest equipmentCategory = new UsageReportPointCategoryRest(); - equipmentCategory.addValue("views", 0); - equipmentCategory.setId("equipment"); - - UsageReportPointCategoryRest eventCategory = new 
UsageReportPointCategoryRest();
-        eventCategory.addValue("views", 0);
-        eventCategory.setId("event");
-
-        List categories = List.of(publicationCategory, patentCategory, fundingCategory,
-            projectCategory, productCategory, journalCategory, personCategory, orgUnitCategory,
-            equipmentCategory, eventCategory);
-
-        UsageReportPointRest pointPerMonth = new UsageReportPointDateRest();
-        pointPerMonth.setId("June 2019");
-        pointPerMonth.addValue("views", 0);
-
-        List pointsPerMonth = List.of(pointPerMonth);
-
-        // And request the sites global usage report (show top most popular items) for a specific date range
-        // we expect no points becase we are searching in a moment before the view of item happened
-        getClient(adminToken)
-            .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" +
-                "/sites/" + site.getID() + "&startDate=2019-06-01&endDate=2019-06-02&category=site-mainReports"))
-            // ** THEN **
-            .andExpect(status().isOk())
-            .andExpect(jsonPath("$._embedded.usagereports", not(empty())))
-            .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder(
-                matchUsageReport(site.getID() + "_" + TOTAL_VISITS_REPORT_ID, TOP_ITEMS_REPORT_ID, points),
-                matchUsageReport(site.getID() + "_" + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, List.of()),
-                matchUsageReport(site.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID,
-                    TOTAL_VISITS_PER_MONTH_REPORT_ID, pointsPerMonth),
-                matchUsageReport(site.getID() + "_" + TOP_CONTINENTS_REPORT_ID,
-                    TOP_CONTINENTS_REPORT_ID,List.of()),
-                matchUsageReport(site.getID() + "_" + TOP_CATEGORIES_REPORT_ID,
-                    TOP_CATEGORIES_REPORT_ID, categories),
-                matchUsageReport(site.getID() + "_" + TOP_COUNTRIES_REPORT_ID,
-                    TOP_COUNTRIES_REPORT_ID, List.of()))));
+        this.statisticsEventListener.addConsumer(
+            throwingConsumerWrapper((event) -> {
+                //create expected report points
+                List points = new ArrayList<>();
+                UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest();
+                expectedPoint1.addValue("views", 0);
+                expectedPoint1.setType("item");
+                points.add(expectedPoint1);
+
+
+                UsageReportPointCategoryRest publicationCategory = new UsageReportPointCategoryRest();
+                publicationCategory.addValue("views", 0);
+                publicationCategory.setId("publication");
+
+                UsageReportPointCategoryRest patentCategory = new UsageReportPointCategoryRest();
+                patentCategory.addValue("views", 0);
+                patentCategory.setId("patent");
+
+                UsageReportPointCategoryRest fundingCategory = new UsageReportPointCategoryRest();
+                fundingCategory.addValue("views", 0);
+                fundingCategory.setId("funding");
+
+                UsageReportPointCategoryRest projectCategory = new UsageReportPointCategoryRest();
+                projectCategory.addValue("views", 0);
+                projectCategory.setId("project");
+
+                UsageReportPointCategoryRest productCategory = new UsageReportPointCategoryRest();
+                productCategory.addValue("views", 0);
+                productCategory.setId("product");
+
+                UsageReportPointCategoryRest journalCategory = new UsageReportPointCategoryRest();
+                journalCategory.addValue("views", 0);
+                journalCategory.setId("journal");
+
+                UsageReportPointCategoryRest personCategory = new UsageReportPointCategoryRest();
+                personCategory.addValue("views", 0);
+                personCategory.setId("person");
+
+                UsageReportPointCategoryRest orgUnitCategory = new UsageReportPointCategoryRest();
+                orgUnitCategory.addValue("views", 0);
+                orgUnitCategory.setId("orgunit");
+
+                UsageReportPointCategoryRest equipmentCategory = new UsageReportPointCategoryRest();
+                equipmentCategory.addValue("views", 0);
+                equipmentCategory.setId("equipment");
+
+                UsageReportPointCategoryRest eventCategory = new UsageReportPointCategoryRest();
+                eventCategory.addValue("views", 0);
+                eventCategory.setId("event");
+
+                List categories = List.of(publicationCategory, patentCategory,
+                    fundingCategory,
+                    projectCategory, productCategory, journalCategory,
+                    personCategory, orgUnitCategory,
+                    equipmentCategory, eventCategory);
+
+                UsageReportPointRest pointPerMonth = new UsageReportPointDateRest();
+                pointPerMonth.setId("June 2019");
+                pointPerMonth.addValue("views", 0);
+
+                List pointsPerMonth = List.of(pointPerMonth);
+
+                // And request the sites global usage report (show top most popular items) for a specific date range
+                // we expect no points because we are searching in a moment before the view of item happened
+                getClient(adminToken)
+                    .perform(
+                        get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" +
+                            "/api/core" +
+                            "/sites/" + site.getID() +
+                            "&startDate=2019-06-01&endDate=2019-06-02&category=site-mainReports"))
+                    // ** THEN **
+                    .andExpect(status().isOk())
+                    .andExpect(jsonPath("$._embedded.usagereports", not(empty())))
+                    .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder(
+                        matchUsageReport(site.getID() + "_" + TOTAL_VISITS_REPORT_ID, TOP_ITEMS_REPORT_ID
+                            , points),
+                        matchUsageReport(site.getID() + "_" + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID,
+                            List.of()),
+                        matchUsageReport(site.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID,
+                            TOTAL_VISITS_PER_MONTH_REPORT_ID, pointsPerMonth),
+                        matchUsageReport(site.getID() + "_" + TOP_CONTINENTS_REPORT_ID,
+                            TOP_CONTINENTS_REPORT_ID, List.of()),
+                        matchUsageReport(site.getID() + "_" + TOP_CATEGORIES_REPORT_ID,
+                            TOP_CATEGORIES_REPORT_ID, categories),
+                        matchUsageReport(site.getID() + "_" + TOP_COUNTRIES_REPORT_ID,
+                            TOP_COUNTRIES_REPORT_ID, List.of())))));
+            }));
     }

     // This test search for statistics one day after the moment in which community is visited
@@ -2059,32 +2187,39 @@ public void usageReportsSearch_Community_VisitedAtTime() throws Exception {
             .content(mapper.writeValueAsBytes(viewEventRest))
             .contentType(contentType))
             .andExpect(status().isCreated());
-        getExpectedDsoViews(communityVisited, 1);
-        UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = getExpectedDsoViews(communityVisited, 1);
-
-        UsageReportPointCityRest expectedPointCity = getExpectedCityViews("New York", 1);
-        UsageReportPointCountryRest expectedPointCountry = getExpectedCountryViews(Locale.US.getCountry(),
-            Locale.US.getDisplayCountry(context.getCurrentLocale()), 1);
-
-        //add one day to the moment when we visit the community
-        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
-        Calendar cal = Calendar.getInstance();
-        cal.add(Calendar.DATE, 1);
-        String endDate = dateFormat.format(cal.getTime());
-        // And request the community usage reports
-        getClient(adminToken)
-            .perform(get("/api/statistics/usagereports/search/object?category=community-mainReports" +
+        this.statisticsEventListener.addConsumer(
+            throwingConsumerWrapper((event) -> {
+                getExpectedDsoViews(communityVisited, 1);
+                UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits =
+                    getExpectedDsoViews(communityVisited, 1);
+
+                UsageReportPointCityRest expectedPointCity = getExpectedCityViews("New York", 1);
+
+                UsageReportPointCountryRest expectedPointCountry = getExpectedCountryViews(Locale.US.getCountry(),
+                    Locale.US.getDisplayCountry(
+                        context.getCurrentLocale()),
+                    1);
+
+                //add one day to the moment when we visit the community
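+                // (the queried range of 2019-06-01 up to that end date therefore includes the visit posted above)
+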
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + Calendar cal = Calendar.getInstance(); + cal.add(Calendar.DATE, 1); + String endDate = dateFormat.format(cal.getTime()); + // And request the community usage reports + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object?category=community-mainReports" + "&uri=http://localhost:8080/server/api/core" + - "/communities/" + communityVisited.getID() + "&startDate=2019-06-01&endDate=" + endDate)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + "/communities/" + communityVisited.getID() + "&startDate=2019-06-01&endDate=" + + endDate)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( UsageReportMatcher .matchUsageReport(communityVisited.getID() + "_" + - TOTAL_VISITS_REPORT_ID, TOTAL_VISITS_REPORT_ID, - Arrays.asList(expectedPointTotalVisits)), + TOTAL_VISITS_REPORT_ID, TOTAL_VISITS_REPORT_ID, + List.of(expectedPointTotalVisits)), UsageReportMatcher.matchUsageReport(communityVisited.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID, TOTAL_VISITS_PER_MONTH_REPORT_ID, @@ -2096,7 +2231,8 @@ public void usageReportsSearch_Community_VisitedAtTime() throws Exception { TOP_COUNTRIES_REPORT_ID, TOP_COUNTRIES_REPORT_ID, Arrays.asList(expectedPointCountry)) - ))); + ))); + })); } // filter bitstream only with start date @@ -2114,40 +2250,46 @@ public void usageReportsSearch_Bitstream_VisitedFromTime() throws Exception { .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) .andExpect(status().isCreated()); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisits.addValue("views", 1); - expectedPointTotalVisits.setType("bitstream"); - expectedPointTotalVisits.setLabel("BitstreamVisitedName"); - expectedPointTotalVisits.setId(bitstreamVisited.getID().toString()); - - UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); - expectedPointCity.addValue("views", 1); - expectedPointCity.setId("New York"); - - UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); - expectedPointCountry.addValue("views", 1); - expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale())); - - //downloads and views expected points - List totalDownloadsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit1.addValue("views", 1); - expectedPointTotalVisitsBit1.setType("bitstream"); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = new UsageReportPointDsoTotalVisitsRest(); - expectedPointTotalVisitsBit2.addValue("views", 0); - expectedPointTotalVisitsBit2.setType("bitstream"); - totalDownloadsPoints.add(expectedPointTotalVisitsBit1); - totalDownloadsPoints.add(expectedPointTotalVisitsBit2); - // And request the community usage reports - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + bitstreamVisited.getID() + "&startDate=2019-05-01")) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) 
- .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisits.addValue("views", 1); + expectedPointTotalVisits.setType("bitstream"); + expectedPointTotalVisits.setLabel("BitstreamVisitedName"); + expectedPointTotalVisits.setId(bitstreamVisited.getID().toString()); + + UsageReportPointCityRest expectedPointCity = new UsageReportPointCityRest(); + expectedPointCity.addValue("views", 1); + expectedPointCity.setId("New York"); + + UsageReportPointCountryRest expectedPointCountry = new UsageReportPointCountryRest(); + expectedPointCountry.addValue("views", 1); + expectedPointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + //downloads and views expected points + List totalDownloadsPoints = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit1.addValue("views", 1); + expectedPointTotalVisitsBit1.setType("bitstream"); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = + new UsageReportPointDsoTotalVisitsRest(); + expectedPointTotalVisitsBit2.addValue("views", 0); + expectedPointTotalVisitsBit2.setType("bitstream"); + totalDownloadsPoints.add(expectedPointTotalVisitsBit1); + totalDownloadsPoints.add(expectedPointTotalVisitsBit2); + // And request the community usage reports + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + bitstreamVisited.getID() + "&startDate=2019-05-01")) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( UsageReportMatcher.matchUsageReport(bitstreamVisited.getID() + "_" + TOTAL_VISITS_REPORT_ID, TOTAL_VISITS_REPORT_ID, Arrays.asList(expectedPointTotalVisits)), @@ -2166,8 +2308,10 @@ public void usageReportsSearch_Bitstream_VisitedFromTime() throws Exception { TOTAL_DOWNLOADS_REPORT_ID, TOTAL_DOWNLOADS_REPORT_ID, Arrays.asList(expectedPointTotalVisits)) - ))); + ))); + })); } + //test for inverse relation between person and publication @Test public void usageReportsSearch_PersonWithPublicationVisited() throws Exception { @@ -2186,46 +2330,16 @@ public void usageReportsSearch_PersonWithPublicationVisited() throws Exception { viewEventRestSecondPublicationOfPerson.setTargetType("item"); viewEventRestSecondPublicationOfPerson.setTargetId(publicationVisited2.getID()); - //first bitstream visit - ViewEventRest viewEventRestFirstPublicationBitstream = new ViewEventRest(); - viewEventRestFirstPublicationBitstream.setTargetType("bitstream"); - viewEventRestFirstPublicationBitstream.setTargetId(bitstreampublication_first.getID()); - - //second bitstream visit - ViewEventRest viewEventRestSecondPublicationBitstream = new ViewEventRest(); - viewEventRestSecondPublicationBitstream.setTargetType("bitstream"); - viewEventRestSecondPublicationBitstream.setTargetId(bitstreampublication_second.getID()); - - //create viewevents for all of items and bistreams - ObjectMapper mapper = new ObjectMapper(); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestItem)) - .contentType(contentType)) - 
.andExpect(status().isCreated()); - - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestFirstPublicationOfPerson)) - .contentType(contentType)) - .andExpect(status().isCreated()); - - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationOfPerson)) - .contentType(contentType)) - .andExpect(status().isCreated()); - - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationOfPerson)) - .contentType(contentType)) - .andExpect(status().isCreated()); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestFirstPublicationBitstream)) - .contentType(contentType)) - .andExpect(status().isCreated()); + //first bitstream visit + ViewEventRest viewEventRestFirstPublicationBitstream = new ViewEventRest(); + viewEventRestFirstPublicationBitstream.setTargetType("bitstream"); + viewEventRestFirstPublicationBitstream.setTargetId(bitstreampublication_first.getID()); + + //second bitstream visit + ViewEventRest viewEventRestSecondPublicationBitstream = new ViewEventRest(); + viewEventRestSecondPublicationBitstream.setTargetType("bitstream"); + viewEventRestSecondPublicationBitstream.setTargetId(bitstreampublication_second.getID()); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationBitstream)) - .contentType(contentType)) - .andExpect(status().isCreated()); //create expected report points for visits UsageReportPointDsoTotalVisitsRest totalVisitRelation = new UsageReportPointDsoTotalVisitsRest(); @@ -2240,7 +2354,8 @@ public void usageReportsSearch_PersonWithPublicationVisited() throws Exception { expectedPointTotal.setLabel(person.getName()); expectedPointTotal.setId(person.getID().toString()); - UsageReportPointDsoTotalVisitsRest totalVisitRelationProjects = new UsageReportPointDsoTotalVisitsRest(); + UsageReportPointDsoTotalVisitsRest totalVisitRelationProjects = + new UsageReportPointDsoTotalVisitsRest(); totalVisitRelationProjects.addValue("views", 0); totalVisitRelationProjects.setType("item"); totalVisitRelationProjects.setLabel("Views"); @@ -2264,62 +2379,102 @@ public void usageReportsSearch_PersonWithPublicationVisited() throws Exception { expectedPointCountryWithRelation.addValue("views", 3); expectedPointCountryWithRelation.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + person.getID().toString())) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + + //create viewevents for all of items and bistreams + ObjectMapper mapper = new ObjectMapper(); + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestItem)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestFirstPublicationOfPerson)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationOfPerson)) + 
.contentType(contentType)) + .andExpect(status().isCreated()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationOfPerson)) + .contentType(contentType)) + .andExpect(status().isCreated()); + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestFirstPublicationBitstream)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + Thread.sleep(1000); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + person.getID().toString())) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_REPORT_ID, - TOTAL_VISITS_REPORT_ID, - Arrays.asList(expectedPointTotal)), + TOTAL_VISITS_REPORT_ID, + TOTAL_VISITS_REPORT_ID, + List.of(expectedPointTotal)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, - TOTAL_VISITS_REPORT_ID, - Arrays.asList(totalVisitRelation)), + TOTAL_VISITS_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, + TOTAL_VISITS_REPORT_ID, + List.of(totalVisitRelation)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(1)), + TOTAL_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(1)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(3)), + TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(3)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_PROJECTS, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(0)), + TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_PERSON_PROJECTS, + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(0)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, - Arrays.asList(expectedPointCity)), + TOP_CITIES_REPORT_ID, TOP_CITIES_REPORT_ID, + List.of(expectedPointCity)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_CITIES_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, - TOP_CITIES_REPORT_ID, - Arrays.asList(expectedPointCityWithRelation)), + TOP_CITIES_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, + TOP_CITIES_REPORT_ID, + List.of(expectedPointCityWithRelation)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_CITIES_REPORT_ID_RELATION_PERSON_PROJECTS, - TOP_CITIES_REPORT_ID, - Collections.emptyList()), + TOP_CITIES_REPORT_ID_RELATION_PERSON_PROJECTS, + TOP_CITIES_REPORT_ID, + Collections.emptyList()), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - Arrays.asList(expectedPointCountry)), + TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of(expectedPointCountry)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_COUNTRIES_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, - 
TOP_COUNTRIES_REPORT_ID, - Arrays.asList(expectedPointCountryWithRelation)), + TOP_COUNTRIES_REPORT_ID_RELATION_PERSON_RESEARCHOUTPUTS, + TOP_COUNTRIES_REPORT_ID, + List.of(expectedPointCountryWithRelation)), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOP_COUNTRIES_REPORT_ID_RELATION_PERSON_PROJECTS, - TOP_COUNTRIES_REPORT_ID, - Collections.emptyList()), + TOP_COUNTRIES_REPORT_ID_RELATION_PERSON_PROJECTS, + TOP_COUNTRIES_REPORT_ID, + Collections.emptyList()), UsageReportMatcher.matchUsageReport(person.getID() + "_" + - TOTAL_VISITS_REPORT_ID_RELATION_PERSON_PROJECTS, - TOTAL_VISITS_REPORT_ID, - Arrays.asList(totalVisitRelationProjects)) - ))); + TOTAL_VISITS_REPORT_ID_RELATION_PERSON_PROJECTS, + TOTAL_VISITS_REPORT_ID, + List.of(totalVisitRelationProjects)) + ))); + })); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationBitstream)) + .contentType(contentType)) + .andExpect(status().isCreated()); } + //test for inverse relation between orgunit and publication @Test public void usageReportsSearch_OrgUnitWithPublicationVisited() throws Exception { @@ -2389,90 +2544,97 @@ public void usageReportsSearch_OrgUnitWithPublicationVisited() throws Exception .contentType(contentType)) .andExpect(status().isCreated()); - //create expected report points for visits - UsageReportPointDsoTotalVisitsRest totalVisitRelation = new UsageReportPointDsoTotalVisitsRest(); - totalVisitRelation.addValue("views", 3); - totalVisitRelation.setType("item"); - totalVisitRelation.setLabel("Views"); - totalVisitRelation.setId(orgUnit.getID().toString()); - - //create expected report points for city visits with relation - UsageReportPointCityRest expectedPointCityWithRelation = getExpectedCityViews("New York", 3); - - //create expected report points for country visits with relation - UsageReportPointCountryRest expectedPointCountryWithRelation = getExpectedCountryViews(Locale.US.getCountry(), - Locale.US.getDisplayCountry(context.getCurrentLocale()), 3); - - //top items expected report points - List points = new ArrayList<>(); - //first publication - UsageReportPointDsoTotalVisitsRest expectedPoint1 = getExpectedDsoViews(publicationVisited2, 2); - points.add(expectedPoint1); - //second publication - UsageReportPointDsoTotalVisitsRest expectedPoint2 = getExpectedDsoViews(publicationVisited1, 1); - points.add(expectedPoint2); - - //total downloads expected points - List totalDownloadsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = getExpectedDsoViews( - bitstreampublication_first, 1); - - UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = getExpectedDsoViews( - bitstreampublication_second, 1); - - totalDownloadsPoints.add(expectedPointTotalVisitsBit1); - totalDownloadsPoints.add(expectedPointTotalVisitsBit2); - - //total downloads and views expected points - //views - List totalDownloadsAndViewsPoints = new ArrayList<>(); - UsageReportPointDsoTotalVisitsRest views = new UsageReportPointDsoTotalVisitsRest(); - views.addValue("views", 3); - views.setType("item"); - views.setLabel("Item visits"); - //downloads - UsageReportPointDsoTotalVisitsRest downloads = new UsageReportPointDsoTotalVisitsRest(); - downloads.addValue("views", 2); - downloads.setType("bitstream"); - downloads.setLabel("File visits"); - totalDownloadsAndViewsPoints.add(views); - totalDownloadsAndViewsPoints.add(downloads); - - getClient(adminToken) - 
.perform(get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server/api/core" + - "/items/" + orgUnit.getID().toString()) + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + + //create expected report points for visits + UsageReportPointDsoTotalVisitsRest totalVisitRelation = new UsageReportPointDsoTotalVisitsRest(); + totalVisitRelation.addValue("views", 3); + totalVisitRelation.setType("item"); + totalVisitRelation.setLabel("Views"); + totalVisitRelation.setId(orgUnit.getID().toString()); + + //create expected report points for city visits with relation + UsageReportPointCityRest expectedPointCityWithRelation = getExpectedCityViews("New York", 3); + + //create expected report points for country visits with relation + UsageReportPointCountryRest expectedPointCountryWithRelation = + getExpectedCountryViews(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale()), 3); + + //top items expected report points + List points = new ArrayList<>(); + //first publication + UsageReportPointDsoTotalVisitsRest expectedPoint1 = getExpectedDsoViews(publicationVisited2, 2); + points.add(expectedPoint1); + //second publication + UsageReportPointDsoTotalVisitsRest expectedPoint2 = getExpectedDsoViews(publicationVisited1, 1); + points.add(expectedPoint2); + + //total downloads expected points + List totalDownloadsPoints = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit1 = getExpectedDsoViews( + bitstreampublication_first, 1); + + UsageReportPointDsoTotalVisitsRest expectedPointTotalVisitsBit2 = getExpectedDsoViews( + bitstreampublication_second, 1); + + totalDownloadsPoints.add(expectedPointTotalVisitsBit1); + totalDownloadsPoints.add(expectedPointTotalVisitsBit2); + + //total downloads and views expected points + //views + List totalDownloadsAndViewsPoints = new ArrayList<>(); + UsageReportPointDsoTotalVisitsRest views = new UsageReportPointDsoTotalVisitsRest(); + views.addValue("views", 3); + views.setType("item"); + views.setLabel("Item visits"); + //downloads + UsageReportPointDsoTotalVisitsRest downloads = new UsageReportPointDsoTotalVisitsRest(); + downloads.addValue("views", 2); + downloads.setType("bitstream"); + downloads.setLabel("File visits"); + totalDownloadsAndViewsPoints.add(views); + totalDownloadsAndViewsPoints.add(downloads); + + getClient(adminToken) + .perform( + get("/api/statistics/usagereports/search/object?uri=http://localhost:8080/server" + + "/api/core" + + "/items/" + orgUnit.getID().toString()) .param("size", "50")) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.hasItems( + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.hasItems( UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + TOTAL_VISITS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOTAL_VISITS_REPORT_ID, - Arrays.asList(totalVisitRelation)), + TOTAL_VISITS_REPORT_ID, + List.of(totalVisitRelation)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOTAL_VISITS_PER_MONTH_REPORT_ID, - getLastMonthVisitPoints(3)), + TOTAL_VISITS_PER_MONTH_REPORT_ID, + getLastMonthVisitPoints(3)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + 
TOP_CITIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOP_CITIES_REPORT_ID, - Arrays.asList(expectedPointCityWithRelation)), + TOP_CITIES_REPORT_ID, + List.of(expectedPointCityWithRelation)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + TOP_COUNTRIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOP_COUNTRIES_REPORT_ID, - Arrays.asList(expectedPointCountryWithRelation)), + TOP_COUNTRIES_REPORT_ID, + List.of(expectedPointCountryWithRelation)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + TOP_ITEMS_REPORT_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOP_ITEMS_REPORT_ID, points), + TOP_ITEMS_REPORT_ID, points), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + TOTAL_DOWNLOADS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOTAL_DOWNLOADS_REPORT_ID, totalDownloadsPoints), + TOTAL_DOWNLOADS_REPORT_ID, totalDownloadsPoints), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + TOTAL_VISITS_TOTAL_DOWNLOADS_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, - TOTAL_VISITS_TOTAL_DOWNLOADS, - totalDownloadsAndViewsPoints) - ))); + TOTAL_VISITS_TOTAL_DOWNLOADS, + totalDownloadsAndViewsPoints) + ))); + })); } @Test @@ -2510,8 +2672,8 @@ public void usageReportsSearch_Collection_ItemReports() throws Exception { viewEventRest.setTargetId(item.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) .andExpect(status().isCreated()); ViewEventRest viewEventRest2 = new ViewEventRest(); @@ -2519,13 +2681,13 @@ public void usageReportsSearch_Collection_ItemReports() throws Exception { viewEventRest2.setTargetId(item2.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest2)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest2)) + .contentType(contentType)) .andExpect(status().isCreated()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest2)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest2)) + .contentType(contentType)) .andExpect(status().isCreated()); ViewEventRest viewEventRest3 = new ViewEventRest(); @@ -2533,8 +2695,8 @@ public void usageReportsSearch_Collection_ItemReports() throws Exception { viewEventRest3.setTargetId(item3.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest3)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest3)) + .contentType(contentType)) .andExpect(status().isCreated()); ViewEventRest viewEventRest4 = new ViewEventRest(); @@ -2542,95 +2704,103 @@ public void usageReportsSearch_Collection_ItemReports() throws Exception { viewEventRest4.setTargetId(item4.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest4)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest4)) + .contentType(contentType)) .andExpect(status().isCreated()); - UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint1.addValue("views", 1); - expectedPoint1.setType("item"); - expectedPoint1.setLabel("My item"); - expectedPoint1.setId(item.getID().toString()); - - UsageReportPointDsoTotalVisitsRest expectedPoint2 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint2.addValue("views", 2); - 
expectedPoint2.setType("item"); - expectedPoint2.setLabel("My item 2"); - expectedPoint2.setId(item2.getID().toString()); - - UsageReportPointDsoTotalVisitsRest expectedPoint3 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint3.addValue("views", 1); - expectedPoint3.setType("item"); - expectedPoint3.setLabel("My item 3"); - expectedPoint3.setId(item3.getID().toString()); - - UsageReportPointDsoTotalVisitsRest expectedPoint4 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint4.addValue("views", 1); - expectedPoint4.setType("item"); - expectedPoint4.setLabel("My item 4"); - expectedPoint4.setId(item4.getID().toString()); - - List points = List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); - - UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); - pointCity.addValue("views", 5); - pointCity.setId("New York"); - - UsageReportPointContinentRest pointContinent = new UsageReportPointContinentRest(); - pointContinent.addValue("views", 5); - pointContinent.setId("North America"); - - UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); - pointCountry.addValue("views", 5); - pointCountry.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); - - UsageReportPointCategoryRest articleCategory = new UsageReportPointCategoryRest(); - articleCategory.addValue("views", 1); - articleCategory.setId("article"); - - UsageReportPointCategoryRest thesisCategory = new UsageReportPointCategoryRest(); - thesisCategory.addValue("views", 3); - thesisCategory.setId("thesis"); - - UsageReportPointCategoryRest otherCategory = new UsageReportPointCategoryRest(); - otherCategory.addValue("views", 1); - otherCategory.setId("other"); - - UsageReportPointCategoryRest bookCategory = new UsageReportPointCategoryRest(); - bookCategory.addValue("views", 0); - bookCategory.setId("book"); - - UsageReportPointCategoryRest bookChapterCategory = new UsageReportPointCategoryRest(); - bookChapterCategory.addValue("views", 0); - bookChapterCategory.setId("bookChapter"); - - UsageReportPointCategoryRest datasetCategory = new UsageReportPointCategoryRest(); - datasetCategory.addValue("views", 0); - datasetCategory.setId("dataset"); - - List categories = List.of(articleCategory, thesisCategory, otherCategory, bookCategory, - bookChapterCategory, datasetCategory); - - // And request the collections global usage report (show top most popular items) - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object") - .param("category", "publicationCollection-itemReports") - .param("uri", "http://localhost:8080/server/api/core/collections/" + collectionNotVisited.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - matchUsageReport(collectionNotVisited.getID() + "_" + TOTAL_ITEMS_VISITS_REPORT_ID, - TOP_ITEMS_REPORT_ID, points), - matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, List.of(pointCity)), - matchUsageReport(collectionNotVisited.getID() + "_" + TOTAL_ITEMS_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), - matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_CONTINENTS_REPORT_ID, - TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), - matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_CATEGORIES_REPORT_ID, - TOP_CATEGORIES_REPORT_ID, 
categories), - matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + + UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint1.addValue("views", 1); + expectedPoint1.setType("item"); + expectedPoint1.setLabel("My item"); + expectedPoint1.setId(item.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint2 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint2.addValue("views", 2); + expectedPoint2.setType("item"); + expectedPoint2.setLabel("My item 2"); + expectedPoint2.setId(item2.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint3 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint3.addValue("views", 1); + expectedPoint3.setType("item"); + expectedPoint3.setLabel("My item 3"); + expectedPoint3.setId(item3.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint4 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint4.addValue("views", 1); + expectedPoint4.setType("item"); + expectedPoint4.setLabel("My item 4"); + expectedPoint4.setId(item4.getID().toString()); + + List points = + List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); + + UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); + pointCity.addValue("views", 5); + pointCity.setId("New York"); + + UsageReportPointContinentRest pointContinent = new UsageReportPointContinentRest(); + pointContinent.addValue("views", 5); + pointContinent.setId("North America"); + + UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); + pointCountry.addValue("views", 5); + pointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + UsageReportPointCategoryRest articleCategory = new UsageReportPointCategoryRest(); + articleCategory.addValue("views", 1); + articleCategory.setId("article"); + + UsageReportPointCategoryRest thesisCategory = new UsageReportPointCategoryRest(); + thesisCategory.addValue("views", 3); + thesisCategory.setId("thesis"); + + UsageReportPointCategoryRest otherCategory = new UsageReportPointCategoryRest(); + otherCategory.addValue("views", 1); + otherCategory.setId("other"); + + UsageReportPointCategoryRest bookCategory = new UsageReportPointCategoryRest(); + bookCategory.addValue("views", 0); + bookCategory.setId("book"); + + UsageReportPointCategoryRest bookChapterCategory = new UsageReportPointCategoryRest(); + bookChapterCategory.addValue("views", 0); + bookChapterCategory.setId("bookChapter"); + + UsageReportPointCategoryRest datasetCategory = new UsageReportPointCategoryRest(); + datasetCategory.addValue("views", 0); + datasetCategory.setId("dataset"); + + List categories = + List.of(articleCategory, thesisCategory, otherCategory, bookCategory, + bookChapterCategory, datasetCategory); + + // And request the collections global usage report (show top most popular items) + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object") + .param("category", "publicationCollection-itemReports") + .param("uri", "http://localhost:8080/server/api/core/collections/" + + collectionNotVisited.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + 
matchUsageReport(collectionNotVisited.getID() + "_" + TOTAL_ITEMS_VISITS_REPORT_ID, + TOP_ITEMS_REPORT_ID, points), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, List.of(pointCity)), + matchUsageReport(collectionNotVisited.getID() + "_" + TOTAL_ITEMS_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_CONTINENTS_REPORT_ID, + TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_CATEGORIES_REPORT_ID, + TOP_CATEGORIES_REPORT_ID, categories), + matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + })); } @Test @@ -2716,7 +2886,8 @@ public void usageReportsSearch_Collection_DownloadReports() throws Exception { getClient(adminToken) .perform(get("/api/statistics/usagereports/search/object") .param("category", "collection-downloadReports") - .param("uri", "http://localhost:8080/server/api/core/collections/" + collectionNotVisited.getID())) + .param("uri", + "http://localhost:8080/server/api/core/collections/" + collectionNotVisited.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( @@ -2764,8 +2935,8 @@ public void usageReportsSearch_Community_ItemReports() throws Exception { viewEventRest.setTargetId(item.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) .andExpect(status().isCreated()); ViewEventRest viewEventRest2 = new ViewEventRest(); @@ -2773,13 +2944,13 @@ public void usageReportsSearch_Community_ItemReports() throws Exception { viewEventRest2.setTargetId(item2.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest2)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest2)) + .contentType(contentType)) .andExpect(status().isCreated()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest2)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest2)) + .contentType(contentType)) .andExpect(status().isCreated()); ViewEventRest viewEventRest3 = new ViewEventRest(); @@ -2787,8 +2958,8 @@ public void usageReportsSearch_Community_ItemReports() throws Exception { viewEventRest3.setTargetId(item3.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest3)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest3)) + .contentType(contentType)) .andExpect(status().isCreated()); ViewEventRest viewEventRest4 = new ViewEventRest(); @@ -2796,111 +2967,120 @@ public void usageReportsSearch_Community_ItemReports() throws Exception { viewEventRest4.setTargetId(item4.getID()); getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest4)) - .contentType(contentType)) + .content(mapper.writeValueAsBytes(viewEventRest4)) + .contentType(contentType)) .andExpect(status().isCreated()); - UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint1.addValue("views", 1); - 
expectedPoint1.setType("item"); - expectedPoint1.setLabel("My item"); - expectedPoint1.setId(item.getID().toString()); - - UsageReportPointDsoTotalVisitsRest expectedPoint2 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint2.addValue("views", 2); - expectedPoint2.setType("item"); - expectedPoint2.setLabel("My item 2"); - expectedPoint2.setId(item2.getID().toString()); - - UsageReportPointDsoTotalVisitsRest expectedPoint3 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint3.addValue("views", 1); - expectedPoint3.setType("item"); - expectedPoint3.setLabel("My item 3"); - expectedPoint3.setId(item3.getID().toString()); - - UsageReportPointDsoTotalVisitsRest expectedPoint4 = new UsageReportPointDsoTotalVisitsRest(); - expectedPoint4.addValue("views", 1); - expectedPoint4.setType("item"); - expectedPoint4.setLabel("My item 4"); - expectedPoint4.setId(item4.getID().toString()); - - List points = List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); - - UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); - pointCity.addValue("views", 5); - pointCity.setId("New York"); - - UsageReportPointContinentRest pointContinent = new UsageReportPointContinentRest(); - pointContinent.addValue("views", 5); - pointContinent.setId("North America"); - - UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); - pointCountry.addValue("views", 5); - pointCountry.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); - - UsageReportPointCategoryRest publicationCategory = new UsageReportPointCategoryRest(); - publicationCategory.addValue("views", 1); - publicationCategory.setId("publication"); - - UsageReportPointCategoryRest patentCategory = new UsageReportPointCategoryRest(); - patentCategory.addValue("views", 2); - patentCategory.setId("patent"); - - UsageReportPointCategoryRest fundingCategory = new UsageReportPointCategoryRest(); - fundingCategory.addValue("views", 1); - fundingCategory.setId("funding"); - - UsageReportPointCategoryRest projectCategory = new UsageReportPointCategoryRest(); - projectCategory.addValue("views", 1); - projectCategory.setId("project"); - - UsageReportPointCategoryRest productCategory = new UsageReportPointCategoryRest(); - productCategory.addValue("views", 0); - productCategory.setId("product"); - - UsageReportPointCategoryRest journalCategory = new UsageReportPointCategoryRest(); - journalCategory.addValue("views", 0); - journalCategory.setId("journal"); - - UsageReportPointCategoryRest personCategory = new UsageReportPointCategoryRest(); - personCategory.addValue("views", 0); - personCategory.setId("person"); - - UsageReportPointCategoryRest orgUnitCategory = new UsageReportPointCategoryRest(); - orgUnitCategory.addValue("views", 0); - orgUnitCategory.setId("orgunit"); - - UsageReportPointCategoryRest equipmentCategory = new UsageReportPointCategoryRest(); - equipmentCategory.addValue("views", 0); - equipmentCategory.setId("equipment"); - - UsageReportPointCategoryRest eventCategory = new UsageReportPointCategoryRest(); - eventCategory.addValue("views", 0); - eventCategory.setId("event"); - - List categories = List.of(publicationCategory, patentCategory, fundingCategory, - projectCategory, productCategory, journalCategory, personCategory, orgUnitCategory, - equipmentCategory, eventCategory); - // And request the collections global usage report (show top most popular items) - getClient(adminToken) - .perform(get("/api/statistics/usagereports/search/object") - 
.param("category", "community-itemReports") - .param("uri", "http://localhost:8080/server/api/core/communities/" + community.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) - .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( - matchUsageReport(community.getID() + "_" + TOTAL_ITEMS_VISITS_REPORT_ID, - TOP_ITEMS_REPORT_ID, points), - matchUsageReport(community.getID() + "_" + TOP_ITEMS_CITIES_REPORT_ID, - TOP_CITIES_REPORT_ID, List.of(pointCity)), - matchUsageReport(community.getID() + "_" + TOTAL_ITEMS_VISITS_PER_MONTH_REPORT_ID, - TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), - matchUsageReport(community.getID() + "_" + TOP_ITEMS_CONTINENTS_REPORT_ID, - TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), - matchUsageReport(community.getID() + "_" + TOP_ITEMS_CATEGORIES_REPORT_ID, - TOP_CATEGORIES_REPORT_ID, categories), - matchUsageReport(community.getID() + "_" + TOP_ITEMS_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + + UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint1.addValue("views", 1); + expectedPoint1.setType("item"); + expectedPoint1.setLabel("My item"); + expectedPoint1.setId(item.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint2 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint2.addValue("views", 2); + expectedPoint2.setType("item"); + expectedPoint2.setLabel("My item 2"); + expectedPoint2.setId(item2.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint3 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint3.addValue("views", 1); + expectedPoint3.setType("item"); + expectedPoint3.setLabel("My item 3"); + expectedPoint3.setId(item3.getID().toString()); + + UsageReportPointDsoTotalVisitsRest expectedPoint4 = new UsageReportPointDsoTotalVisitsRest(); + expectedPoint4.addValue("views", 1); + expectedPoint4.setType("item"); + expectedPoint4.setLabel("My item 4"); + expectedPoint4.setId(item4.getID().toString()); + + List points = + List.of(expectedPoint1, expectedPoint2, expectedPoint3, expectedPoint4); + + UsageReportPointCityRest pointCity = new UsageReportPointCityRest(); + pointCity.addValue("views", 5); + pointCity.setId("New York"); + + UsageReportPointContinentRest pointContinent = new UsageReportPointContinentRest(); + pointContinent.addValue("views", 5); + pointContinent.setId("North America"); + + UsageReportPointCountryRest pointCountry = new UsageReportPointCountryRest(); + pointCountry.addValue("views", 5); + pointCountry.setIdAndLabel(Locale.US.getCountry(), + Locale.US.getDisplayCountry(context.getCurrentLocale())); + + UsageReportPointCategoryRest publicationCategory = new UsageReportPointCategoryRest(); + publicationCategory.addValue("views", 1); + publicationCategory.setId("publication"); + + UsageReportPointCategoryRest patentCategory = new UsageReportPointCategoryRest(); + patentCategory.addValue("views", 2); + patentCategory.setId("patent"); + + UsageReportPointCategoryRest fundingCategory = new UsageReportPointCategoryRest(); + fundingCategory.addValue("views", 1); + fundingCategory.setId("funding"); + + UsageReportPointCategoryRest projectCategory = new UsageReportPointCategoryRest(); + projectCategory.addValue("views", 1); + projectCategory.setId("project"); + + UsageReportPointCategoryRest productCategory = new 
UsageReportPointCategoryRest(); + productCategory.addValue("views", 0); + productCategory.setId("product"); + + UsageReportPointCategoryRest journalCategory = new UsageReportPointCategoryRest(); + journalCategory.addValue("views", 0); + journalCategory.setId("journal"); + + UsageReportPointCategoryRest personCategory = new UsageReportPointCategoryRest(); + personCategory.addValue("views", 0); + personCategory.setId("person"); + + UsageReportPointCategoryRest orgUnitCategory = new UsageReportPointCategoryRest(); + orgUnitCategory.addValue("views", 0); + orgUnitCategory.setId("orgunit"); + + UsageReportPointCategoryRest equipmentCategory = new UsageReportPointCategoryRest(); + equipmentCategory.addValue("views", 0); + equipmentCategory.setId("equipment"); + + UsageReportPointCategoryRest eventCategory = new UsageReportPointCategoryRest(); + eventCategory.addValue("views", 0); + eventCategory.setId("event"); + + List categories = List.of(publicationCategory, patentCategory, + fundingCategory, + projectCategory, productCategory, journalCategory, + personCategory, orgUnitCategory, + equipmentCategory, eventCategory); + // And request the collections global usage report (show top most popular items) + getClient(adminToken) + .perform(get("/api/statistics/usagereports/search/object") + .param("category", "community-itemReports") + .param("uri", + "http://localhost:8080/server/api/core/communities/" + community.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.usagereports", not(empty()))) + .andExpect(jsonPath("$._embedded.usagereports", Matchers.containsInAnyOrder( + matchUsageReport(community.getID() + "_" + TOTAL_ITEMS_VISITS_REPORT_ID, + TOP_ITEMS_REPORT_ID, points), + matchUsageReport(community.getID() + "_" + TOP_ITEMS_CITIES_REPORT_ID, + TOP_CITIES_REPORT_ID, List.of(pointCity)), + matchUsageReport(community.getID() + "_" + TOTAL_ITEMS_VISITS_PER_MONTH_REPORT_ID, + TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), + matchUsageReport(community.getID() + "_" + TOP_ITEMS_CONTINENTS_REPORT_ID, + TOP_CONTINENTS_REPORT_ID, List.of(pointContinent)), + matchUsageReport(community.getID() + "_" + TOP_ITEMS_CATEGORIES_REPORT_ID, + TOP_CATEGORIES_REPORT_ID, categories), + matchUsageReport(community.getID() + "_" + TOP_ITEMS_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); + })); } @Test @@ -3004,6 +3184,12 @@ TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(8)), TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); } + private LocalDate toLocalDate(Date date) { + return date.toInstant() + .atZone(ZoneId.systemDefault()) + .toLocalDate(); + } + private List getLastMonthVisitPoints(int viewsLastMonth) { return getListOfVisitsPerMonthsPoints(viewsLastMonth, 0); } @@ -3015,10 +3201,45 @@ private List getListOfVisitsPerMonthsPoints(int viewsLastM return getListOfVisitsPerMonthsPoints(viewsLastMonth, nrOfMonthsBack); } - private LocalDate toLocalDate(Date date) { - return date.toInstant() - .atZone(ZoneId.systemDefault()) - .toLocalDate(); + private static final class StatisticsEventListener implements EventListener { + + public Queue> consumers = new LinkedList<>(); + + /* (non-Javadoc) + * @see org.dspace.services.model.EventListener#getEventNamePrefixes() + */ + public String[] getEventNamePrefixes() { + return null; + } + + /* (non-Javadoc) + * @see org.dspace.services.model.EventListener#getResourcePrefix() + */ + public String getResourcePrefix() { + return null; + } + + public void addConsumer(Consumer... 
consumers) { + this.consumers.addAll(List.of(consumers)); + } + + public Queue> getConsumers() { + return this.consumers; + } + + public void clearConsumers() { + this.consumers.clear(); + } + + /* (non-Javadoc) + * @see org.dspace.services.model.EventListener#receiveEvent(org.dspace.services.model.Event) + */ + public void receiveEvent(Event event) { + Consumer poll = this.consumers.poll(); + if (poll != null) { + poll.accept(event); + } + } } private List getListOfVisitsPerMonthsPoints(int viewsLastMonth, int nrOfMonthsBack) { From 87e820f12a4a2754904538110abd2da33f7c76c4 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 28 Nov 2023 18:49:14 +0100 Subject: [PATCH 612/686] [DSC-1053] Fixes UpdateViewAndDownloadMetricsIT failures --- .../app/rest/StatisticsRestRepositoryIT.java | 2 +- .../rest/UpdateViewAndDownloadMetricsIT.java | 454 ++++++++++-------- 2 files changed, 244 insertions(+), 212 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java index 8956cdc7d809..db043ff9379a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java @@ -3201,7 +3201,7 @@ private List getListOfVisitsPerMonthsPoints(int viewsLastM return getListOfVisitsPerMonthsPoints(viewsLastMonth, nrOfMonthsBack); } - private static final class StatisticsEventListener implements EventListener { + public static final class StatisticsEventListener implements EventListener { public Queue> consumers = new LinkedList<>(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateViewAndDownloadMetricsIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateViewAndDownloadMetricsIT.java index 94e4b2858752..d33d4d0c0bcd 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateViewAndDownloadMetricsIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UpdateViewAndDownloadMetricsIT.java @@ -10,6 +10,7 @@ import static org.apache.commons.codec.CharEncoding.UTF_8; import static org.apache.commons.io.IOUtils.toInputStream; import static org.dspace.app.launcher.ScriptLauncher.handleScript; +import static org.dspace.util.FunctionalUtils.throwingConsumerWrapper; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; @@ -25,6 +26,7 @@ import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.metrics.CrisMetrics; import org.dspace.app.metrics.service.CrisMetricsService; +import org.dspace.app.rest.StatisticsRestRepositoryIT.StatisticsEventListener; import org.dspace.app.rest.model.ViewEventRest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; @@ -38,6 +40,7 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.services.ConfigurationService; +import org.dspace.services.EventService; import org.dspace.statistics.factory.StatisticsServiceFactory; import org.dspace.statistics.service.SolrLoggerService; import org.junit.Before; @@ -53,14 +56,19 @@ public class UpdateViewAndDownloadMetricsIT extends AbstractControllerIntegratio private CrisMetricsService crisMetriscService; @Autowired ConfigurationService configurationService; + @Autowired + protected EventService eventService; 
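    // The StatisticsEventListener declared below is registered with this EventService in setUp();
    // each test then queues its assertions via addConsumer(throwingConsumerWrapper(...)), and the
    // listener fires one queued consumer per received usage event, so the store-metrics checks
    // only run after the view/download event has actually been consumed (note that setUp() also
    // disables solr-statistics.autoCommit).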
CrisMetrics crisMetrics = null; + protected final StatisticsEventListener statisticsEventListener = new StatisticsEventListener(); + @Before @Override public void setUp() throws Exception { super.setUp(); // Explicitly use solr commit in SolrLoggerServiceImpl#postView configurationService.setProperty("solr-statistics.autoCommit", false); + this.eventService.registerEventListener(this.statisticsEventListener); } @@ -105,24 +113,26 @@ public void storeCrisMetricsForItemWithViewAndDownloads() throws Exception { .content(mapper.writeValueAsBytes(viewEventRestBitstream)) .contentType(contentType)) .andExpect(status().isCreated()); - - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - //find view and downloads metrics - CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + //find view and downloads metrics + CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "download", itemVisited.getID()); - CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "view", itemVisited.getID()); - assertEquals("view", metrics_views.getMetricType()); - assertEquals("download", metrics_downloads.getMetricType()); - assertEquals(2, metrics_downloads.getMetricCount(), 0); - assertEquals(1, metrics_views.getMetricCount(), 0); - assertNull(metrics_downloads.getDeltaPeriod1()); - assertNull(metrics_views.getDeltaPeriod2()); - assertTrue(metrics_views.getLast()); - assertTrue(metrics_downloads.getLast()); + assertEquals("view", metrics_views.getMetricType()); + assertEquals("download", metrics_downloads.getMetricType()); + assertEquals(2, metrics_downloads.getMetricCount(), 0); + assertEquals(1, metrics_views.getMetricCount(), 0); + assertNull(metrics_downloads.getDeltaPeriod1()); + assertNull(metrics_views.getDeltaPeriod2()); + assertTrue(metrics_views.getLast()); + assertTrue(metrics_downloads.getLast()); + })); } //test only with views @@ -148,21 +158,25 @@ public void storeCrisMetricsForItemWithViews() throws Exception { .content(mapper.writeValueAsBytes(viewEventRestItem)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metrics_views = 
crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "view", itemVisited.getID()); - // find downloads metric - CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + // find downloads metric + CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "download", itemVisited.getID()); - assertEquals("view", metrics_views.getMetricType()); - assertTrue(metrics_views.getLast()); - assertEquals(1, metrics_views.getMetricCount(), 0); - assertNull(metrics_views.getDeltaPeriod2()); - // must be null because for the item there are not downloads - assertNull(metrics_downloads); + assertEquals("view", metrics_views.getMetricType()); + assertTrue(metrics_views.getLast()); + assertEquals(1, metrics_views.getMetricCount(), 0); + assertNull(metrics_views.getDeltaPeriod2()); + // must be null because for the item there are not downloads + assertNull(metrics_downloads); + })); } //test with previous metrics @@ -209,26 +223,29 @@ public void storeCrisMetricsForItemWithViewAndDownloadsWithExistingValues() thro .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "download", itemVisited.getID()); - CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( context, "view", itemVisited.getID()); - // find previous metric - CrisMetrics old_metric = crisMetriscService.find(context, crisMetrics.getID()); - assertEquals("view", metrics_views.getMetricType()); - assertEquals("download", metrics_downloads.getMetricType()); - assertEquals(2, metrics_downloads.getMetricCount(), 0); - assertEquals(1, metrics_views.getMetricCount(), 0); - assertNull(metrics_downloads.getDeltaPeriod1()); - assertNull(metrics_views.getDeltaPeriod2()); - assertTrue(metrics_views.getLast()); - assertTrue(metrics_downloads.getLast()); - // previous metric must have last value false - assertFalse(old_metric.getLast()); + // find previous metric + CrisMetrics old_metric = crisMetriscService.find(context, crisMetrics.getID()); + assertEquals("view", metrics_views.getMetricType()); + assertEquals("download", metrics_downloads.getMetricType()); + assertEquals(2, metrics_downloads.getMetricCount(), 0); + assertEquals(1, metrics_views.getMetricCount(), 0); + assertNull(metrics_downloads.getDeltaPeriod1()); + assertNull(metrics_views.getDeltaPeriod2()); + assertTrue(metrics_views.getLast()); + assertTrue(metrics_downloads.getLast()); + // previous metric must have last value false + assertFalse(old_metric.getLast()); + })); } //test with previous week and month views and downloads @@ -298,36 +315,41 @@ context, itemVisited, toInputStream("test", 
UTF_8)) .content(mapper.writeValueAsBytes(viewEventRestBitstream)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[] {"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "download", itemVisited.getID()); - CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", itemVisited.getID()); - // find previous metrics - CrisMetrics old_metric_views_month = crisMetriscService.find(context, crisMetrics_previous_week_views.getID()); - CrisMetrics old_metric_views_week = crisMetriscService.find(context, crisMetrics_previous_month_views.getID()); - CrisMetrics old_metric_downloads_month = crisMetriscService.find(context, - crisMetrics_previous_month_downloads.getID()); - CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, - crisMetrics_previous_week_downloads.getID()); - assertEquals("view", metrics_views.getMetricType()); - assertEquals("download", metrics_downloads.getMetricType()); - assertEquals(2, metrics_downloads.getMetricCount(), 0); - assertEquals(1, metrics_views.getMetricCount(), 0); - assertTrue(metrics_views.getLast()); - assertTrue(metrics_downloads.getLast()); - assertTrue(metrics_views.getDeltaPeriod1() == 0); - assertTrue(metrics_views.getDeltaPeriod2() == 0); - assertTrue(metrics_downloads.getDeltaPeriod1() == 1); - assertTrue(metrics_downloads.getDeltaPeriod2() == 0); - // all last values of previous must be false - assertFalse(old_metric_views_month.getLast()); - assertFalse(old_metric_views_week.getLast()); - assertFalse(old_metric_downloads_month.getLast()); - assertFalse(old_metric_downloads_week.getLast()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metrics_downloads = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "download", itemVisited.getID()); + CrisMetrics metrics_views = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", itemVisited.getID()); + // find previous metrics + CrisMetrics old_metric_views_month = + crisMetriscService.find(context, crisMetrics_previous_week_views.getID()); + CrisMetrics old_metric_views_week = + crisMetriscService.find(context, crisMetrics_previous_month_views.getID()); + CrisMetrics old_metric_downloads_month = crisMetriscService.find(context, + crisMetrics_previous_month_downloads.getID()); + CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, + crisMetrics_previous_week_downloads.getID()); + assertEquals("view", metrics_views.getMetricType()); + assertEquals("download", metrics_downloads.getMetricType()); + assertEquals(2, metrics_downloads.getMetricCount(), 0); + assertEquals(1, metrics_views.getMetricCount(), 0); + assertTrue(metrics_views.getLast()); + assertTrue(metrics_downloads.getLast()); + assertTrue(metrics_views.getDeltaPeriod1() == 0); + assertTrue(metrics_views.getDeltaPeriod2() == 0); + assertTrue(metrics_downloads.getDeltaPeriod1() == 1); + 
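                // Presumably deltaPeriod1/deltaPeriod2 hold the change against the week-old and
                // month-old metric rows created at the start of this test.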
assertTrue(metrics_downloads.getDeltaPeriod2() == 0); + // all last values of previous must be false + assertFalse(old_metric_views_month.getLast()); + assertFalse(old_metric_views_week.getLast()); + assertFalse(old_metric_downloads_month.getLast()); + assertFalse(old_metric_downloads_week.getLast()); + })); } @@ -407,46 +429,49 @@ context, item, toInputStream("test", UTF_8)) .content(mapper.writeValueAsBytes(viewEventRestBitstream)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metric_view_item = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", item.getID()); - CrisMetrics metric_download = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "download", item.getID()); - CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", community.getID()); - // find previous metrics - CrisMetrics old_metric_views_month = crisMetriscService.find(context, - crisMetrics_previous_month_views_comm.getID()); - CrisMetrics old_metric_views_week = crisMetriscService.find(context, - crisMetrics_previous_week_views_comm.getID()); - CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, - crisMetrics_previous_week_downloads.getID()); - - - //control download values - assertEquals("download", metric_download.getMetricType()); - assertEquals(1, metric_download.getMetricCount(), 0); - assertTrue(metric_download.getLast()); - assertTrue(metric_download.getDeltaPeriod1() == 0.0); - - assertEquals("view", metrics_views_comm.getMetricType()); - assertTrue(metrics_views_comm.getLast()); - assertEquals(2, metrics_views_comm.getMetricCount(), 0); - assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); - assertTrue(metrics_views_comm.getDeltaPeriod2() == 1.0); - - assertEquals("view", metric_view_item.getMetricType()); - assertEquals(1, metric_view_item.getMetricCount(), 0); - assertTrue(metric_view_item.getDeltaPeriod1() == 0.0); - assertTrue(metric_view_item.getLast()); - - // all last values of previous must be false - assertFalse(old_metric_views_month.getLast()); - assertFalse(old_metric_views_week.getLast()); - assertFalse(old_metric_downloads_week.getLast()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metric_view_item = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", item.getID()); + CrisMetrics metric_download = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "download", item.getID()); + CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", community.getID()); + // find previous metrics + CrisMetrics old_metric_views_month = crisMetriscService.find(context, + crisMetrics_previous_month_views_comm.getID()); + CrisMetrics old_metric_views_week = crisMetriscService.find(context, + crisMetrics_previous_week_views_comm.getID()); + CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, + 
crisMetrics_previous_week_downloads.getID()); + + + //control download values + assertEquals("download", metric_download.getMetricType()); + assertEquals(1, metric_download.getMetricCount(), 0); + assertTrue(metric_download.getLast()); + assertTrue(metric_download.getDeltaPeriod1() == 0.0); + + assertEquals("view", metrics_views_comm.getMetricType()); + assertTrue(metrics_views_comm.getLast()); + assertEquals(2, metrics_views_comm.getMetricCount(), 0); + assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); + assertTrue(metrics_views_comm.getDeltaPeriod2() == 1.0); + + assertEquals("view", metric_view_item.getMetricType()); + assertEquals(1, metric_view_item.getMetricCount(), 0); + assertTrue(metric_view_item.getDeltaPeriod1() == 0.0); + assertTrue(metric_view_item.getLast()); + + // all last values of previous must be false + assertFalse(old_metric_views_month.getLast()); + assertFalse(old_metric_views_week.getLast()); + assertFalse(old_metric_downloads_week.getLast()); + })); } //test with previous week and month views and downloads for community collection and items together @@ -552,64 +577,67 @@ context, item, toInputStream("test", UTF_8)) .content(mapper.writeValueAsBytes(viewEventRestBitstream)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metric_view_item = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", item.getID()); - CrisMetrics metric_download = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "download", item.getID()); - CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", community.getID()); - CrisMetrics metrics_views_cols = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", col1.getID()); - // find previous metrics - CrisMetrics old_metric_views_month = crisMetriscService.find(context, - crisMetrics_previous_month_views_comm.getID()); - CrisMetrics old_metric_views_week = crisMetriscService.find(context, - crisMetrics_previous_week_views_comm.getID()); - CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, - crisMetrics_previous_week_downloads.getID()); - CrisMetrics old_metric_view_week_items = crisMetriscService.find(context, - crisMetrics_previous_week_views_item.getID()); - CrisMetrics old_metric_view_week_col = crisMetriscService.find(context, - crisMetrics_previous_week_views_col.getID()); - CrisMetrics old_metric_view_month_col = crisMetriscService.find(context, - crisMetrics_previous_month_views_col.getID()); - - //control download values - assertEquals("download", metric_download.getMetricType()); - assertEquals(1, metric_download.getMetricCount(), 0); - assertTrue(metric_download.getLast()); - assertTrue(metric_download.getDeltaPeriod1() == 0.0); - - assertEquals("view", metrics_views_comm.getMetricType()); - assertTrue(metrics_views_comm.getLast()); - assertEquals(2, metrics_views_comm.getMetricCount(), 0); - assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); - assertTrue(metrics_views_comm.getDeltaPeriod2() == 1.0); - - assertEquals("view", metric_view_item.getMetricType()); - assertEquals(1, metric_view_item.getMetricCount(), 0); - assertTrue(metric_view_item.getDeltaPeriod1() == 0.0); - 
assertTrue(metric_view_item.getLast()); - - - assertEquals("view", metrics_views_cols.getMetricType()); - assertEquals(2, metrics_views_cols.getMetricCount(), 0); - assertTrue(metrics_views_cols.getDeltaPeriod1() == 1.0); - assertTrue(metrics_views_cols.getDeltaPeriod2() == -1.0); - assertTrue(metrics_views_cols.getLast()); - - - // all last values of previous must be false - assertFalse(old_metric_views_month.getLast()); - assertFalse(old_metric_views_week.getLast()); - assertFalse(old_metric_downloads_week.getLast()); - assertFalse(old_metric_view_week_items.getLast()); - assertFalse(old_metric_view_week_col.getLast()); - assertFalse(old_metric_view_month_col.getLast()); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metric_view_item = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", item.getID()); + CrisMetrics metric_download = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "download", item.getID()); + CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", community.getID()); + CrisMetrics metrics_views_cols = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", col1.getID()); + // find previous metrics + CrisMetrics old_metric_views_month = crisMetriscService.find(context, + crisMetrics_previous_month_views_comm.getID()); + CrisMetrics old_metric_views_week = crisMetriscService.find(context, + crisMetrics_previous_week_views_comm.getID()); + CrisMetrics old_metric_downloads_week = crisMetriscService.find(context, + crisMetrics_previous_week_downloads.getID()); + CrisMetrics old_metric_view_week_items = crisMetriscService.find(context, + crisMetrics_previous_week_views_item.getID()); + CrisMetrics old_metric_view_week_col = crisMetriscService.find(context, + crisMetrics_previous_week_views_col.getID()); + CrisMetrics old_metric_view_month_col = crisMetriscService.find(context, + crisMetrics_previous_month_views_col.getID()); + + //control download values + assertEquals("download", metric_download.getMetricType()); + assertEquals(1, metric_download.getMetricCount(), 0); + assertTrue(metric_download.getLast()); + assertTrue(metric_download.getDeltaPeriod1() == 0.0); + + assertEquals("view", metrics_views_comm.getMetricType()); + assertTrue(metrics_views_comm.getLast()); + assertEquals(2, metrics_views_comm.getMetricCount(), 0); + assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); + assertTrue(metrics_views_comm.getDeltaPeriod2() == 1.0); + + assertEquals("view", metric_view_item.getMetricType()); + assertEquals(1, metric_view_item.getMetricCount(), 0); + assertTrue(metric_view_item.getDeltaPeriod1() == 0.0); + assertTrue(metric_view_item.getLast()); + + + assertEquals("view", metrics_views_cols.getMetricType()); + assertEquals(2, metrics_views_cols.getMetricCount(), 0); + assertTrue(metrics_views_cols.getDeltaPeriod1() == 1.0); + assertTrue(metrics_views_cols.getDeltaPeriod2() == -1.0); + assertTrue(metrics_views_cols.getLast()); + + + // all last values of previous must be false + assertFalse(old_metric_views_month.getLast()); + assertFalse(old_metric_views_week.getLast()); + assertFalse(old_metric_downloads_week.getLast()); + 
assertFalse(old_metric_view_week_items.getLast()); + assertFalse(old_metric_view_week_col.getLast()); + assertFalse(old_metric_view_month_col.getLast()); + })); } //test with previous week and month views and downloads for community and collection @@ -682,41 +710,45 @@ public void storeCrisMetricsForCommunityAndCollectionWithViewWithPreviousWeekAnd .content(mapper.writeValueAsBytes(viewEventRestColl)) .contentType(contentType)) .andExpect(status().isCreated()); - String[] args = new String[]{"store-metrics"}; - TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); - int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); - assertEquals(0, status); - CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", community.getID()); - CrisMetrics metrics_views_cols = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( - context, "view", col1.getID()); - // find previous metrics - CrisMetrics old_metric_views_month = crisMetriscService.find(context, - crisMetrics_previous_month_views_comm.getID()); - CrisMetrics old_metric_views_week = crisMetriscService.find(context, - crisMetrics_previous_week_views_comm.getID()); - CrisMetrics old_metric_view_week_col = crisMetriscService.find(context, - crisMetrics_previous_week_views_col.getID()); - CrisMetrics old_metric_view_month_col = crisMetriscService.find(context, - crisMetrics_previous_month_views_col.getID()); - - - assertEquals("view", metrics_views_comm.getMetricType()); - assertTrue(metrics_views_comm.getLast()); - assertEquals(2, metrics_views_comm.getMetricCount(), 0); - assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); - assertTrue(metrics_views_comm.getDeltaPeriod2() == 0); - - assertEquals("view", metrics_views_cols.getMetricType()); - assertEquals(2, metrics_views_cols.getMetricCount(), 0); - assertTrue(metrics_views_cols.getDeltaPeriod1() == 0); - assertTrue(metrics_views_cols.getDeltaPeriod2() == -1.0); - assertTrue(metrics_views_cols.getLast()); - - // all last values of previous must be false - assertFalse(old_metric_views_month.getLast()); - assertFalse(old_metric_views_week.getLast()); - assertFalse(old_metric_view_week_col.getLast()); - assertFalse(old_metric_view_month_col.getLast()); + + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + String[] args = new String[] {"store-metrics"}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + int status = handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl, admin); + assertEquals(0, status); + CrisMetrics metrics_views_comm = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", community.getID()); + CrisMetrics metrics_views_cols = crisMetriscService.findLastMetricByResourceIdAndMetricsTypes( + context, "view", col1.getID()); + // find previous metrics + CrisMetrics old_metric_views_month = crisMetriscService.find(context, + crisMetrics_previous_month_views_comm.getID()); + CrisMetrics old_metric_views_week = crisMetriscService.find(context, + crisMetrics_previous_week_views_comm.getID()); + CrisMetrics old_metric_view_week_col = crisMetriscService.find(context, + crisMetrics_previous_week_views_col.getID()); + CrisMetrics old_metric_view_month_col = crisMetriscService.find(context, + crisMetrics_previous_month_views_col.getID()); + + + assertEquals("view", metrics_views_comm.getMetricType()); + assertTrue(metrics_views_comm.getLast()); + assertEquals(2, 
metrics_views_comm.getMetricCount(), 0); + assertTrue(metrics_views_comm.getDeltaPeriod1() == 1.0); + assertTrue(metrics_views_comm.getDeltaPeriod2() == 0); + + assertEquals("view", metrics_views_cols.getMetricType()); + assertEquals(2, metrics_views_cols.getMetricCount(), 0); + assertTrue(metrics_views_cols.getDeltaPeriod1() == 0); + assertTrue(metrics_views_cols.getDeltaPeriod2() == -1.0); + assertTrue(metrics_views_cols.getLast()); + + // all last values of previous must be false + assertFalse(old_metric_views_month.getLast()); + assertFalse(old_metric_views_week.getLast()); + assertFalse(old_metric_view_week_col.getLast()); + assertFalse(old_metric_view_month_col.getLast()); + })); } } From bbfd222ce31fa12a4b4aaedae374cd20402c1b83 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 28 Nov 2023 22:42:24 +0100 Subject: [PATCH 613/686] [DSC-1053] Fixes ViewEventRestRepositoryIT failure --- .../src/main/java/org/dspace/core/AbstractHibernateDAO.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java index a37bed89c4b0..c217eed6ab92 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java @@ -136,7 +136,7 @@ public boolean exists(Context context, Class clazz, UUID id) throws SQLExcept org.hibernate.query.Query query = getHibernateSession(context).createQuery(criteriaQuery); query.setMaxResults(1); - return query.getSingleResult() != null; + return query.uniqueResult() != null; } @Override From f663ab8cdc53928ce5987978142f6f939612946f Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 29 Nov 2023 09:52:33 +0100 Subject: [PATCH 614/686] [DSC-1053] Fixes StatisticsRestRepositoryIT failure --- .../app/rest/StatisticsRestRepositoryIT.java | 243 ++++++++++-------- 1 file changed, 132 insertions(+), 111 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java index db043ff9379a..ba26f161aa17 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java @@ -444,6 +444,7 @@ public void totalVisitsReport_Collection_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); + Thread.sleep(1000); this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { @@ -491,13 +492,6 @@ public void totalVisitsReport_Collection_NotVisited() throws Exception { @Test public void totalVisitsReport_Item_Visited() throws Exception { - // ** WHEN ** - // We visit an Item - ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("item"); - viewEventRest.setTargetId(itemVisited.getID()); - - Thread.sleep(1000); this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { @@ -517,6 +511,12 @@ public void totalVisitsReport_Item_Visited() throws Exception { ))); })); + // ** WHEN ** + // We visit an Item + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("item"); + viewEventRest.setTargetId(itemVisited.getID()); + ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") @@ -710,18 +710,6 @@ public void 
totalVisitsReport_Bitstream_NotVisited() throws Exception { @Test public void totalVisitsPerMonthReport_Item_Visited() throws Exception { - // ** WHEN ** - // We visit an Item - ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("item"); - viewEventRest.setTargetId(itemVisited.getID()); - - ObjectMapper mapper = new ObjectMapper(); - - getClient(loggedInToken).perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest)) - .contentType(contentType)) - .andExpect(status().isCreated()); this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { @@ -788,6 +776,19 @@ public void totalVisitsPerMonthReport_Item_Visited() throws Exception { ) ))); })); + + // ** WHEN ** + // We visit an Item + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("item"); + viewEventRest.setTargetId(itemVisited.getID()); + + ObjectMapper mapper = new ObjectMapper(); + + getClient(loggedInToken).perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); } @Test @@ -993,12 +994,6 @@ public void topCountriesReport_Collection_Visited() throws Exception { viewEventRest.setTargetType("collection"); viewEventRest.setTargetId(collectionVisited.getID()); - ObjectMapper mapper = new ObjectMapper(); - - getClient(loggedInToken).perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest)) - .contentType(contentType)) - .andExpect(status().isCreated()); this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { List expectedPoints = List.of( @@ -1053,6 +1048,11 @@ public void topCountriesReport_Collection_Visited() throws Exception { ) ))); })); + ObjectMapper mapper = new ObjectMapper(); + getClient(loggedInToken).perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); } @Test @@ -1080,30 +1080,35 @@ public void topCountriesReport_Community_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); - getClient(loggedInToken).perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest)) - .contentType(contentType)) - .andExpect(status().isCreated()); Thread.sleep(1000); + UsageReportPointCountryRest expectedPoint = new UsageReportPointCountryRest(); expectedPoint.addValue("views", 2); expectedPoint.setIdAndLabel(Locale.US.getCountry(), Locale.US.getDisplayCountry(context.getCurrentLocale())); - // And request that collection's TopCountries report - getClient(adminToken).perform( - get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) - // ** THEN ** - .andExpect(status().isOk()) - .andExpect(jsonPath("$", Matchers.is( - UsageReportMatcher.matchUsageReport( - communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID, - TOP_COUNTRIES_REPORT_ID, - List.of( - getExpectedCountryViews("US", "United States", 2) - ) - ) - ))); + this.statisticsEventListener.addConsumer( + throwingConsumerWrapper((event) -> { + // And request that collection's TopCountries report + getClient(adminToken).perform( + get("/api/statistics/usagereports/" + communityVisited.getID() + "_" + TOP_COUNTRIES_REPORT_ID)) + // ** THEN ** + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.is( + UsageReportMatcher.matchUsageReport( + communityVisited.getID() + "_" + 
TOP_COUNTRIES_REPORT_ID, + TOP_COUNTRIES_REPORT_ID, + List.of( + getExpectedCountryViews("US", "United States", 2) + ) + ) + ))); + })); + + getClient(loggedInToken).perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); } /** @@ -1423,10 +1428,6 @@ public void usageReportsSearch_Site_mainReports() throws Exception { viewEventRest4.setTargetType("item"); viewEventRest4.setTargetId(item4.getID()); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest4)) - .contentType(contentType)) - .andExpect(status().isCreated()); UsageReportPointDsoTotalVisitsRest expectedPoint1 = new UsageReportPointDsoTotalVisitsRest(); expectedPoint1.addValue("views", 1); @@ -1512,6 +1513,8 @@ public void usageReportsSearch_Site_mainReports() throws Exception { projectCategory, productCategory, journalCategory, personCategory, orgUnitCategory, equipmentCategory, eventCategory); + Thread.sleep(1000); + this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { // And request the sites global usage report (show top most popular items) @@ -1538,6 +1541,11 @@ TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); })); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest4)) + .contentType(contentType)) + .andExpect(status().isCreated()); } @Test @@ -1761,18 +1769,7 @@ public void usageReportsSearch_Collection_NotVisited() throws Exception { @Test public void usageReportsSearch_Item_Visited_FileNotVisited() throws Exception { - // ** WHEN ** - // We visit an item - ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("item"); - viewEventRest.setTargetId(itemVisited.getID()); - ObjectMapper mapper = new ObjectMapper(); - - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest)) - .contentType(contentType)) - .andExpect(status().isCreated()); this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = @@ -1847,6 +1844,19 @@ public void usageReportsSearch_Item_Visited_FileNotVisited() throws Exception { ) ))); })); + + // ** WHEN ** + // We visit an item + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("item"); + viewEventRest.setTargetId(itemVisited.getID()); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); } @Test @@ -2073,16 +2083,7 @@ public void usageReportsSearch_ItemNotVisited_AtTime() throws Exception { Site site = SiteBuilder.createSite(context).build(); //create new item using ItemBuilder context.restoreAuthSystemState(); - //visit first item now - ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("item"); - viewEventRest.setTargetId(itemVisited.getID()); - ObjectMapper mapper = new ObjectMapper(); - //add visit for first item - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest)) - .contentType(contentType)) - .andExpect(status().isCreated()); + this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { //create expected raport points @@ -2170,23 +2171,22 @@ 
public void usageReportsSearch_ItemNotVisited_AtTime() throws Exception { matchUsageReport(site.getID() + "_" + TOP_COUNTRIES_REPORT_ID, TOP_COUNTRIES_REPORT_ID, List.of())))); })); - } - // This test search for statistics one day after the moment in which community is visited - @Test - public void usageReportsSearch_Community_VisitedAtTime() throws Exception { - // ** WHEN ** - // We visit a community + //visit first item now ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("community"); - viewEventRest.setTargetId(communityVisited.getID()); - + viewEventRest.setTargetType("item"); + viewEventRest.setTargetId(itemVisited.getID()); ObjectMapper mapper = new ObjectMapper(); - + //add visit for first item getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) - .andExpect(status().isCreated()); + .andExpect(status().isCreated()); + } + + // This test search for statistics one day after the moment in which community is visited + @Test + public void usageReportsSearch_Community_VisitedAtTime() throws Exception { this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { @@ -2233,23 +2233,25 @@ public void usageReportsSearch_Community_VisitedAtTime() throws Exception { Arrays.asList(expectedPointCountry)) ))); })); - } - // filter bitstream only with start date - @Test - public void usageReportsSearch_Bitstream_VisitedFromTime() throws Exception { // ** WHEN ** - // We visit a bitstream + // We visit a community ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("bitstream"); - viewEventRest.setTargetId(bitstreamVisited.getID()); + viewEventRest.setTargetType("community"); + viewEventRest.setTargetId(communityVisited.getID()); ObjectMapper mapper = new ObjectMapper(); getClient().perform(post("/api/statistics/viewevents") .content(mapper.writeValueAsBytes(viewEventRest)) .contentType(contentType)) - .andExpect(status().isCreated()); + .andExpect(status().isCreated()); + } + + // filter bitstream only with start date + @Test + public void usageReportsSearch_Bitstream_VisitedFromTime() throws Exception { + this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { UsageReportPointDsoTotalVisitsRest expectedPointTotalVisits = @@ -2310,6 +2312,19 @@ public void usageReportsSearch_Bitstream_VisitedFromTime() throws Exception { Arrays.asList(expectedPointTotalVisits)) ))); })); + + // ** WHEN ** + // We visit a bitstream + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("bitstream"); + viewEventRest.setTargetId(bitstreamVisited.getID()); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); } //test for inverse relation between person and publication @@ -2539,10 +2554,7 @@ public void usageReportsSearch_OrgUnitWithPublicationVisited() throws Exception .contentType(contentType)) .andExpect(status().isCreated()); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationBitstream)) - .contentType(contentType)) - .andExpect(status().isCreated()); + Thread.sleep(1000); this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { @@ -2608,33 +2620,38 @@ public void usageReportsSearch_OrgUnitWithPublicationVisited() throws Exception 
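            // The report assertions are queued as a consumer before the final view event (now posted
            // below) so that they execute when that event is processed; the short sleep presumably
            // gives the earlier events time to reach the statistics core first.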
.andExpect(jsonPath("$._embedded.usagereports", not(empty()))) .andExpect(jsonPath("$._embedded.usagereports", Matchers.hasItems( UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOTAL_VISITS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOTAL_VISITS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, TOTAL_VISITS_REPORT_ID, List.of(totalVisitRelation)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOTAL_VISITS_PER_MONTH_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(3)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOP_CITIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOP_CITIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, TOP_CITIES_REPORT_ID, List.of(expectedPointCityWithRelation)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOP_COUNTRIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOP_COUNTRIES_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, TOP_COUNTRIES_REPORT_ID, List.of(expectedPointCountryWithRelation)), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOP_ITEMS_REPORT_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOP_ITEMS_REPORT_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, TOP_ITEMS_REPORT_ID, points), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOTAL_DOWNLOADS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOTAL_DOWNLOADS_REPORT_ID_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, TOTAL_DOWNLOADS_REPORT_ID, totalDownloadsPoints), UsageReportMatcher.matchUsageReport(orgUnit.getID() + "_" + - TOTAL_VISITS_TOTAL_DOWNLOADS_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, + TOTAL_VISITS_TOTAL_DOWNLOADS_RELATION_ORGUNIT_RP_RESEARCHOUTPUTS, TOTAL_VISITS_TOTAL_DOWNLOADS, totalDownloadsAndViewsPoints) ))); })); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRestSecondPublicationBitstream)) + .contentType(contentType)) + .andExpect(status().isCreated()); } @Test @@ -2699,15 +2716,12 @@ public void usageReportsSearch_Collection_ItemReports() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); + Thread.sleep(1000); + ViewEventRest viewEventRest4 = new ViewEventRest(); viewEventRest4.setTargetType("item"); viewEventRest4.setTargetId(item4.getID()); - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest4)) - .contentType(contentType)) - .andExpect(status().isCreated()); - this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { @@ -2801,6 +2815,11 @@ TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), matchUsageReport(collectionNotVisited.getID() + "_" + TOP_ITEMS_COUNTRIES_REPORT_ID, TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); })); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest4)) + .contentType(contentType)) + .andExpect(status().isCreated()); } @Test @@ -2962,14 +2981,7 @@ public void usageReportsSearch_Community_ItemReports() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); - ViewEventRest viewEventRest4 = new ViewEventRest(); - viewEventRest4.setTargetType("item"); - viewEventRest4.setTargetId(item4.getID()); - - getClient().perform(post("/api/statistics/viewevents") - .content(mapper.writeValueAsBytes(viewEventRest4)) - .contentType(contentType)) - .andExpect(status().isCreated()); + Thread.sleep(1000); 
this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { @@ -3081,6 +3093,15 @@ TOTAL_VISITS_PER_MONTH_REPORT_ID, getLastMonthVisitPoints(5)), matchUsageReport(community.getID() + "_" + TOP_ITEMS_COUNTRIES_REPORT_ID, TOP_COUNTRIES_REPORT_ID, List.of(pointCountry))))); })); + + ViewEventRest viewEventRest4 = new ViewEventRest(); + viewEventRest4.setTargetType("item"); + viewEventRest4.setTargetId(item4.getID()); + + getClient().perform(post("/api/statistics/viewevents") + .content(mapper.writeValueAsBytes(viewEventRest4)) + .contentType(contentType)) + .andExpect(status().isCreated()); } @Test From 190c2e240c162a3e62cc9e3bf74323d70481be65 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Wed, 29 Nov 2023 16:03:57 +0100 Subject: [PATCH 615/686] [DSC-1053] Fixes StatisticsRestRepositoryIT failure --- .../app/rest/StatisticsRestRepositoryIT.java | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java index ba26f161aa17..adf2ea830613 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/StatisticsRestRepositoryIT.java @@ -493,6 +493,8 @@ public void totalVisitsReport_Collection_NotVisited() throws Exception { @Test public void totalVisitsReport_Item_Visited() throws Exception { + Thread.sleep(1000); + this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { // And request that collection's TotalVisits stat report @@ -825,7 +827,7 @@ public void totalVisitsPerMonthReport_Collection_Visited() throws Exception { .contentType(contentType)) .andExpect(status().isCreated()); - Thread.sleep(1000); + Thread.sleep(3000); this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { @@ -921,12 +923,8 @@ public void TotalDownloadsReport_Bitstream() throws Exception { @Test public void TotalDownloadsReport_Item() throws Exception { - // ** WHEN ** - // We visit an Item's bitstream - ViewEventRest viewEventRest = new ViewEventRest(); - viewEventRest.setTargetType("bitstream"); - viewEventRest.setTargetId(bitstreamVisited.getID()); + Thread.sleep(1000); this.statisticsEventListener.addConsumer( throwingConsumerWrapper((event) -> { @@ -956,6 +954,12 @@ public void TotalDownloadsReport_Item() throws Exception { ))); })); + // ** WHEN ** + // We visit an Item's bitstream + ViewEventRest viewEventRest = new ViewEventRest(); + viewEventRest.setTargetType("bitstream"); + viewEventRest.setTargetId(bitstreamVisited.getID()); + ObjectMapper mapper = new ObjectMapper(); getClient(loggedInToken).perform(post("/api/statistics/viewevents") From e87c9f8e578ecf6bbf1684a1b93c7256ac83db4e Mon Sep 17 00:00:00 2001 From: mohamed eskander Date: Wed, 29 Nov 2023 17:07:42 +0200 Subject: [PATCH 616/686] [DSC-737] Restrict export formats by groups and Bulk Item export with metadata and bitstream --- .../dspace/content/BitstreamServiceImpl.java | 7 + .../crosswalk/METSDisseminationCrosswalk.java | 23 +- .../crosswalks/ItemExportCrosswalk.java | 5 + .../METSStreamDisseminationCrosswalk.java | 63 ++++ .../crosswalks/ReferCrosswalk.java | 48 +++ .../crosswalks/ZipItemExportCrosswalk.java | 325 ++++++++++++++++++ .../service/ItemExportFormatServiceImpl.java | 2 + .../content/service/BitstreamService.java | 2 + .../crosswalks/ZipItemExportCrosswalkIT.java | 256 
++++++++++++++ dspace/config/spring/api/crosswalks.xml | 18 +- 10 files changed, 744 insertions(+), 5 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java create mode 100644 dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java create mode 100644 dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 2233bedd034e..7cc346ff8da9 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -407,6 +407,13 @@ public Bitstream getBitstreamByName(Item item, String bundleName, String bitstre return null; } + @Override + public List getBitstreamByBundleName(Item item, String bundleName) throws SQLException { + return itemService.getBundles(item, bundleName).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .collect(Collectors.toList()); + } + @Override public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLException { List bundles = itemService.getBundles(item, bundleName); diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java index b8a4a8aef390..e6156e78d295 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java @@ -72,6 +72,16 @@ public class METSDisseminationCrosswalk private static final String schemaLocation = METS_NS.getURI() + " " + METS_XSD; + private String metsPackagerPlugin; + + public METSDisseminationCrosswalk() { + this.metsPackagerPlugin = METS_PACKAGER_PLUGIN; + } + + public METSDisseminationCrosswalk(String metsPackagerPlugin) { + this.metsPackagerPlugin = metsPackagerPlugin; + } + @Override public Namespace[] getNamespaces() { return (Namespace[]) ArrayUtils.clone(namespaces); @@ -103,10 +113,10 @@ public Element disseminateElement(Context context, DSpaceObject dso) PackageDisseminator dip = (PackageDisseminator) CoreServiceFactory.getInstance().getPluginService() - .getNamedPlugin(PackageDisseminator.class, METS_PACKAGER_PLUGIN); + .getNamedPlugin(PackageDisseminator.class, metsPackagerPlugin); if (dip == null) { throw new CrosswalkInternalException( - "Cannot find a disseminate plugin for package=" + METS_PACKAGER_PLUGIN); + "Cannot find a disseminate plugin for package=" + metsPackagerPlugin); } try { @@ -117,11 +127,16 @@ public Element disseminateElement(Context context, DSpaceObject dso) // Create a temporary file to disseminate into ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - String tempDirectory = (configurationService.hasProperty("upload.temp.dir")) + String tempDirectoryPath = (configurationService.hasProperty("upload.temp.dir")) ? 
configurationService.getProperty("upload.temp.dir") : System.getProperty("java.io.tmpdir"); - File tempFile = File.createTempFile("METSDissemination" + dso.hashCode(), null, new File(tempDirectory)); + File tempDirectory = new File(tempDirectoryPath); + if (!tempDirectory.exists()) { + tempDirectory.mkdirs(); + } + + File tempFile = File.createTempFile("METSDissemination" + dso.hashCode(), null, tempDirectory); tempFile.deleteOnExit(); // Disseminate METS to temp file diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java index 3a8b5a1524d1..dba686198e8a 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java @@ -11,6 +11,7 @@ import org.dspace.content.crosswalk.CrosswalkMode; import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.core.Context; /** * Implementation of {@link StreamDisseminationCrosswalk} related to item @@ -40,4 +41,8 @@ public default Optional getEntityType() { public default CrosswalkMode getCrosswalkMode() { return CrosswalkMode.SINGLE; } + + public default boolean isAuthorized(Context context) { + return true; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java new file mode 100644 index 000000000000..292a1e14f946 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java @@ -0,0 +1,63 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks; + +import java.io.IOException; +import java.io.OutputStream; +import java.sql.SQLException; +import javax.annotation.PostConstruct; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.content.crosswalk.CrosswalkException; +import org.dspace.content.crosswalk.METSDisseminationCrosswalk; +import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.core.Context; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; + +/** + * Implementation of {@link StreamDisseminationCrosswalk} that produces a METS + * manifest for the DSpace item as a metadata description, using + * {@link METSDisseminationCrosswalk}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class METSStreamDisseminationCrosswalk implements StreamDisseminationCrosswalk { + + private METSDisseminationCrosswalk metsDisseminationCrosswalk; + + @PostConstruct + public void setup() { + metsDisseminationCrosswalk = new METSDisseminationCrosswalk("AIP"); + } + + @Override + public boolean canDisseminate(Context context, DSpaceObject dso) { + return metsDisseminationCrosswalk.canDisseminate(dso); + } + + @Override + public void disseminate(Context context, DSpaceObject dso, OutputStream out) + throws CrosswalkException, IOException, SQLException, AuthorizeException { + + Element element = metsDisseminationCrosswalk.disseminateElement(context, dso); + + XMLOutputter xmlOutputter = new XMLOutputter(Format.getPrettyFormat()); + xmlOutputter.output(element, out); + + } + + @Override + public String getMIMEType() { + return "application/xml"; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java index d54fef41ee68..519d9531cb71 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java @@ -58,6 +58,9 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfigurationUtilsService; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; import org.dspace.services.ConfigurationService; import org.dspace.util.UUIDUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -94,6 +97,9 @@ public class ReferCrosswalk implements ItemExportCrosswalk { @Autowired private MetadataSecurityService metadataSecurityService; + @Autowired + private GroupService groupService; + private Converter converter; private Consumer> linesPostProcessor; @@ -116,6 +122,8 @@ public class ReferCrosswalk implements ItemExportCrosswalk { private CrosswalkMode crosswalkMode; + private List allowedGroups; + @PostConstruct private void postConstruct() throws IOException { String parent = configurationService.getProperty("dspace.dir") + File.separator + "config" + File.separator; @@ -128,6 +136,21 @@ private void postConstruct() throws IOException { } } + @Override + public boolean isAuthorized(Context context) { + if (CollectionUtils.isEmpty(allowedGroups)) { + return true; + } + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return allowedGroups.contains(Group.ANONYMOUS); + } + + return allowedGroups.stream() + .anyMatch(groupName -> isMemberOfGroupNamed(context, ePerson, groupName)); + } + @Override public void disseminate(Context context, DSpaceObject dso, OutputStream out) throws CrosswalkException, IOException, SQLException, AuthorizeException { @@ -136,6 +159,10 @@ public void disseminate(Context context, DSpaceObject dso, OutputStream out) throw new CrosswalkObjectNotSupported("Can only crosswalk an Item with the configured type: " + entityType); } + if (!isAuthorized(context)) { + throw new AuthorizeException("The current user is not allowed to perform a zip item export"); + } + List lines = getItemLines(context, dso, true); if (linesPostProcessor != null) { @@ -154,6 +181,10 @@ public void disseminate(Context context, Iterator dsoIte throw new UnsupportedOperationException("No template defined 
for multiple items"); } + if (!isAuthorized(context)) { + throw new AuthorizeException("The current user is not allowed to perform a zip item export"); + } + List lines = new ArrayList(); for (TemplateLine line : multipleItemsTemplateLines) { @@ -466,6 +497,15 @@ private boolean hasExpectedEntityType(Item item) { return Objects.equals(itemEntityType, entityType); } + private boolean isMemberOfGroupNamed(Context context, EPerson ePerson, String groupName) { + try { + Group group = groupService.findByName(context, groupName); + return groupService.isMember(context, ePerson, group); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + public void setConverter(Converter converter) { this.converter = converter; } @@ -525,4 +565,12 @@ public void setPubliclyReadable(boolean isPubliclyReadable) { this.publiclyReadable = isPubliclyReadable; } + public List getAllowedGroups() { + return allowedGroups; + } + + public void setAllowedGroups(List allowedGroups) { + this.allowedGroups = allowedGroups; + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java new file mode 100644 index 000000000000..2096fa037273 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java @@ -0,0 +1,325 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.crosswalk.CrosswalkException; +import org.dspace.content.crosswalk.CrosswalkMode; +import org.dspace.content.crosswalk.CrosswalkObjectNotSupported; +import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.core.exception.SQLRuntimeException; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.dspace.storage.bitstore.service.BitstreamStorageService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.Assert; + +/** + * Implementation of {@link ItemExportCrosswalk} that export all the given items + * creating a zip. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ZipItemExportCrosswalk implements ItemExportCrosswalk { + + private static final Logger LOGGER = LoggerFactory.getLogger(ZipItemExportCrosswalk.class); + + @Autowired + private ItemService itemService; + + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private BitstreamStorageService bitstreamStorageService; + + @Autowired + private GroupService groupService; + + private String zipName = "items.zip"; + + private String entityType; + + private String bitstreamBundle = "ORIGINAL"; + + private String metadataFileName; + + private StreamDisseminationCrosswalk crosswalk; + + private CrosswalkMode crosswalkMode = CrosswalkMode.MULTIPLE; + + private List allowedGroups; + + @Override + public boolean isAuthorized(Context context) { + if (CollectionUtils.isEmpty(allowedGroups)) { + return true; + } + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return allowedGroups.contains(Group.ANONYMOUS); + } + + return allowedGroups.stream() + .anyMatch(groupName -> isMemberOfGroupNamed(context, ePerson, groupName)); + } + + @Override + public boolean canDisseminate(Context context, DSpaceObject dso) { + return dso.getType() == Constants.ITEM && hasExpectedEntityType((Item) dso); + } + + @Override + public void disseminate(Context context, DSpaceObject dso, OutputStream out) + throws CrosswalkException, IOException, SQLException, AuthorizeException { + this.disseminate(context, Arrays.asList(dso).iterator(), out); + } + + @Override + public void disseminate(Context context, Iterator dsoIterator, OutputStream out) + throws CrosswalkException, IOException, SQLException, AuthorizeException { + + Assert.notNull(metadataFileName, "The name of the metadata file is required to perform a bulk item export"); + Assert.notNull(crosswalk, "An instance of DisseminationCrosswalk is required to perform a bulk item export"); + Assert.notNull(zipName, "The name of the zip to be generated is required to perform a bulk item export"); + + if (!isAuthorized(context)) { + throw new AuthorizeException("The current user is not allowed to perform a zip item export"); + } + + createZip(context, dsoIterator, out); + + } + + private void createZip(Context context, Iterator dsoIterator, OutputStream out) + throws CrosswalkObjectNotSupported, IOException { + + try (ZipOutputStream zos = new ZipOutputStream(out)) { + + while (dsoIterator.hasNext()) { + + DSpaceObject dso = dsoIterator.next(); + if (!canDisseminate(context, dso)) { + throw new CrosswalkObjectNotSupported( + "Can only crosswalk an Item with the configured type: " + entityType); + } + + try { + createFolder(context, (Item) dso, zos); + } catch (Exception ex) { + LOGGER.error("An error occurs creating folder for item " + dso.getID(), ex); + } + + } + + } + + } + + private void createFolder(Context context, Item item, ZipOutputStream zos) throws IOException { + + createMetadataEntry(context, item, zos); + + List bitstreams = getBitstreamToExport(item); + for (Bitstream bitstream : bitstreams) { + try { + addBitstreamEntry(context, item, bitstream, zos); + } catch (Exception ex) { + LOGGER.error("An error occurs adding bitstream " + bitstream.getID() + + " to the folder of item " + item.getID(), ex); + } + } + + } + + private void createMetadataEntry(Context context, Item item, ZipOutputStream zos) throws IOException { + ZipEntry metadataEntry = new ZipEntry(getFolderName(item) + "/" + getMetadataFileName()); + 
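+        // the metadata entry is placed inside the item folder (named after the item uuid) and filled with the output of the configured crosswalk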
zos.putNextEntry(metadataEntry); + zos.write(getMetadataFileNameContent(context, item)); + zos.closeEntry(); + } + + private byte[] getMetadataFileNameContent(Context context, Item item) { + try { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + crosswalk.disseminate(context, item, out); + return out.toByteArray(); + } catch (CrosswalkException | IOException | SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + } + + private List getBitstreamToExport(Item item) { + try { + return bitstreamService.getBitstreamByBundleName(item, bitstreamBundle); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + + private void addBitstreamEntry(Context context, Item item, Bitstream bitstream, ZipOutputStream zos) + throws IOException { + + InputStream bitstreamContent = retrieveContent(context, bitstream); + + ZipEntry bitstreamEntry = new ZipEntry(getFolderName(item) + "/" + getBitstreamFileName(context, bitstream)); + zos.putNextEntry(bitstreamEntry); + + try { + writeBitstreamContent(bitstreamContent, zos); + } finally { + zos.closeEntry(); + } + + } + + private void writeBitstreamContent(InputStream content, ZipOutputStream zos) throws IOException { + byte[] bytes = new byte[1024]; + int length; + while ((length = content.read(bytes)) >= 0) { + zos.write(bytes, 0, length); + } + } + + private String getBitstreamFileName(Context context, Bitstream bitstream) { + String name = "bitstream_" + bitstream.getID().toString(); + return getBitstreamExtension(context, bitstream) + .map(extension -> name + "." + extension) + .orElse(name); + } + + private Optional getBitstreamExtension(Context context, Bitstream bitstream) { + try { + return bitstream.getFormat(context).getExtensions().stream().findFirst(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private InputStream retrieveContent(Context context, Bitstream bitstream) { + try { + return bitstreamStorageService.retrieve(context, bitstream); + } catch (SQLException | IOException e) { + throw new RuntimeException(e); + } + } + + private String getMetadataFileName() { + return metadataFileName; + } + + private String getFolderName(Item item) { + return item.getID().toString(); + } + + private boolean isMemberOfGroupNamed(Context context, EPerson ePerson, String groupName) { + try { + Group group = groupService.findByName(context, groupName); + return groupService.isMember(context, ePerson, group); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public String getMIMEType() { + return "application/octet-stream"; + } + + public void setCrosswalkMode(CrosswalkMode crosswalkMode) { + this.crosswalkMode = crosswalkMode; + } + + @Override + public CrosswalkMode getCrosswalkMode() { + return Optional.ofNullable(this.crosswalkMode).orElse(CrosswalkMode.MULTIPLE); + } + + private boolean hasExpectedEntityType(Item item) { + if (StringUtils.isBlank(entityType)) { + return true; + } + return entityType.equals(itemService.getEntityType(item)); + } + + @Override + public String getFileName() { + return getZipName(); + } + + public String getZipName() { + return zipName; + } + + public void setZipName(String zipName) { + this.zipName = zipName; + } + + public Optional getEntityType() { + return Optional.ofNullable(entityType); + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public StreamDisseminationCrosswalk getCrosswalk() { + return crosswalk; + } + + public void 
setCrosswalk(StreamDisseminationCrosswalk crosswalk) { + this.crosswalk = crosswalk; + } + + public String getBitstreamBundle() { + return bitstreamBundle; + } + + public void setBitstreamBundle(String bitstreamBundle) { + this.bitstreamBundle = bitstreamBundle; + } + + public void setMetadataFileName(String metadataFileName) { + this.metadataFileName = metadataFileName; + } + + public List getAllowedGroups() { + return allowedGroups; + } + + public void setAllowedGroups(List allowedGroups) { + this.allowedGroups = allowedGroups; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java index 4d33ba35c5e8..5745ec3e8ce8 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java @@ -45,6 +45,7 @@ public ItemExportFormat get(Context context, String id) { public List getAll(Context context) { return this.streamDissiminatorCrosswalkMapper.getAllItemExportCrosswalks().entrySet().stream() + .filter(entry -> entry.getValue().isAuthorized(context)) .map(entry -> buildItemExportFormat(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); @@ -58,6 +59,7 @@ public List byEntityTypeAndMolteplicity(Context context, Strin .entrySet().stream() .filter(entry -> hasSameMolteplicity(entry.getValue(), molteplicity)) .filter(entry -> hasSameEntityType(entry.getValue(), entityType)) + .filter(entry -> entry.getValue().isAuthorized(context)) .map(entry -> buildItemExportFormat(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); diff --git a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java index 85a4fd140e9a..fa1cbc38beae 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java @@ -210,6 +210,8 @@ public InputStream retrieve(Context context, Bitstream bitstream) public Bitstream getBitstreamByName(Item item, String bundleName, String bitstreamName) throws SQLException; + List getBitstreamByBundleName(Item item, String bundleName) throws SQLException; + public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLException; public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException; diff --git a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java new file mode 100644 index 000000000000..e824fef5a9b1 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java @@ -0,0 +1,256 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks; + +import static org.dspace.builder.CollectionBuilder.createCollection; +import static org.dspace.builder.CommunityBuilder.createCommunity; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; 
+import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; + +import org.apache.commons.collections.IteratorUtils; +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.utils.DSpace; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +public class ZipItemExportCrosswalkIT extends AbstractIntegrationTestWithDatabase { + + private ZipItemExportCrosswalk zipItemExportCrosswalk; + + private Community community; + + private Collection collection; + + @Before + public void setup() throws SQLException, AuthorizeException { + + zipItemExportCrosswalk = new DSpace().getServiceManager() + .getServicesByType(ZipItemExportCrosswalk.class).get(0); + + context.turnOffAuthorisationSystem(); + community = createCommunity(context).build(); + collection = createCollection(context, community).build(); + context.restoreAuthSystemState(); + + } + + @Test + public void testItemsExportWithAdmin() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item1 = createItem("Test Item 1", "2022-01-01", "Luca Giamminonni"); + Item item2 = createItem("Test Item 2", "2022-03-01", "Walter White"); + Item item3 = createItem("Test Item 3", "2020-01-01", "Andrea Bollini"); + + Bitstream bitstream1 = createBitstream(item1, "test.txt", "This is a test"); + Bitstream bitstream2 = createBitstream(item3, "test.pdf", "Last test", "6 months"); + + String expectedEmbargo = LocalDate.now().plus(6, ChronoUnit.MONTHS).format(DateTimeFormatter.ISO_DATE); + + context.restoreAuthSystemState(); + + context.setCurrentUser(admin); + + File tempZip = File.createTempFile("test", "zip"); + tempZip.deleteOnExit(); + + try (FileOutputStream fos = new FileOutputStream(tempZip)) { + zipItemExportCrosswalk.disseminate(context, List.of(item1, item2, item3).iterator(), fos); + } + + try (ZipFile zipFile = new ZipFile(tempZip)) { + + ZipEntry zipEntry = zipFile.getEntry(item1.getID().toString() + "/mets.xml"); + assertThat(zipEntry, notNullValue()); + + String metsContent = getZipEntryContent(zipFile, zipEntry); + + assertThat(metsContent, containsString( + "2022-01-01")); + assertThat(metsContent, + containsString("Test Item 1")); + assertThat(metsContent, containsString("Luca Giamminonni")); + assertThat(metsContent, + containsString("test@email.com")); + assertThat(metsContent, + containsString("test.txt")); + + zipEntry = zipFile.getEntry(item1.getID().toString() + "/bitstream_" + bitstream1.getID().toString()); + assertThat(zipEntry, notNullValue()); + assertThat(getZipEntryContent(zipFile, zipEntry), is("This is a test")); + + zipEntry = 
zipFile.getEntry(item2.getID().toString() + "/mets.xml"); + assertThat(zipEntry, notNullValue()); + + metsContent = getZipEntryContent(zipFile, zipEntry); + + assertThat(metsContent, containsString( + "2022-03-01")); + assertThat(metsContent, + containsString("Test Item 2")); + assertThat(metsContent, containsString("Walter White")); + assertThat(metsContent, + containsString("test@email.com")); + + zipEntry = zipFile.getEntry(item3.getID().toString() + "/mets.xml"); + assertThat(zipEntry, notNullValue()); + + metsContent = getZipEntryContent(zipFile, zipEntry); + + assertThat(metsContent, containsString( + "2020-01-01")); + assertThat(metsContent, + containsString("Test Item 3")); + assertThat(metsContent, containsString("Andrea Bollini")); + assertThat(metsContent, + containsString("test@email.com")); + assertThat(metsContent, containsString("")); + assertThat(metsContent, + containsString("test.pdf")); + + zipEntry = zipFile.getEntry(item3.getID().toString() + "/bitstream_" + bitstream2.getID().toString()); + assertThat(zipEntry, notNullValue()); + assertThat(getZipEntryContent(zipFile, zipEntry), is("Last test")); + + assertThat(getAllEntries(zipFile), hasSize(5)); + + } + + } + + @Test + public void testItemsExportWithCurators() throws Exception { + + context.turnOffAuthorisationSystem(); + + Group curators = GroupBuilder.createGroup(context) + .withName("Curators") + .build(); + + EPerson user = EPersonBuilder.createEPerson(context) + .withEmail("user@test.com") + .withGroupMembership(curators) + .build(); + + Item item1 = createItem("Test Item 1", "2022-01-01", "Luca Giamminonni"); + Item item2 = createItem("Test Item 2", "2022-03-01", "Walter White"); + Item item3 = createItem("Test Item 3", "2020-01-01", "Andrea Bollini"); + + context.restoreAuthSystemState(); + + context.setCurrentUser(user); + + File tempZip = File.createTempFile("test", "zip"); + tempZip.deleteOnExit(); + + try (FileOutputStream fos = new FileOutputStream(tempZip)) { + zipItemExportCrosswalk.disseminate(context, List.of(item1, item2, item3).iterator(), fos); + } + + try (ZipFile zipFile = new ZipFile(tempZip)) { + assertThat(getAllEntries(zipFile), hasSize(3)); + } + + } + + @Test + public void testItemsExportWithNotAuthorizedUser() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item1 = createItem("Test Item 1", "2022-01-01", "Luca Giamminonni"); + Item item2 = createItem("Test Item 2", "2022-03-01", "Walter White"); + Item item3 = createItem("Test Item 3", "2020-01-01", "Andrea Bollini"); + + context.restoreAuthSystemState(); + + context.setCurrentUser(eperson); + + File tempZip = File.createTempFile("test", "zip"); + tempZip.deleteOnExit(); + + try (FileOutputStream fos = new FileOutputStream(tempZip)) { + + AuthorizeException authorizeException = Assert.assertThrows(AuthorizeException.class, + () -> zipItemExportCrosswalk.disseminate(context, List.of(item1, item2, item3).iterator(), fos)); + + assertThat(authorizeException.getMessage(), + is("The current user is not allowed to perform a zip item export")); + } + + } + + private Item createItem(String title, String issueDate, String author) { + return ItemBuilder.createItem(context, collection) + .withTitle(title) + .withIssueDate(issueDate) + .withAuthor(author) + .build(); + } + + private Bitstream createBitstream(Item item, String name, String content) throws Exception { + return BitstreamBuilder.createBitstream(context, item, getInputStream(content)) + .withName(name) + .build(); + } + + private Bitstream createBitstream(Item item, 
String name, String content, String embargoPeriod) throws Exception { + return BitstreamBuilder.createBitstream(context, item, getInputStream(content)) + .withName(name) + .withEmbargoPeriod(embargoPeriod) + .build(); + } + + private String getZipEntryContent(ZipFile zipFile, ZipEntry zipEntry) throws IOException { + return IOUtils.toString(zipFile.getInputStream(zipEntry), StandardCharsets.UTF_8); + } + + private InputStream getInputStream(String str) { + return IOUtils.toInputStream(str, StandardCharsets.UTF_8); + } + + @SuppressWarnings("unchecked") + private List getAllEntries(ZipFile zipFile) { + return IteratorUtils.toList(zipFile.entries().asIterator()); + } + +} diff --git a/dspace/config/spring/api/crosswalks.xml b/dspace/config/spring/api/crosswalks.xml index 34941fe7b0d4..9184a56482da 100644 --- a/dspace/config/spring/api/crosswalks.xml +++ b/dspace/config/spring/api/crosswalks.xml @@ -68,6 +68,7 @@ + @@ -509,7 +510,22 @@ - + + + + + + + + + Administrator + Curators + + + + + + From fcea03feb77a4ce3c87f9ae6527b0ef9f44edde4 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 30 Nov 2023 12:24:25 +0100 Subject: [PATCH 617/686] [DSC-1053] Fixes executor shutdown --- .../java/org/dspace/services/events/SystemEventService.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java index efcd31cb570e..5a4f118fcdc3 100644 --- a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java +++ b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java @@ -67,6 +67,9 @@ public SystemEventService(RequestService requestService) { public void shutdown() { this.requestInterceptor = null; // clear the interceptor this.listenersMap.clear(); + if (this.executorService != null && !this.executorService.isShutdown()) { + this.executorService.shutdown(); + } } From db3bb810742bbdb7a71713340379588204f12a0a Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Thu, 30 Nov 2023 17:52:02 +0100 Subject: [PATCH 618/686] [maven-release-plugin] prepare release dspace-cris-2023.02.00 --- dspace-api/pom.xml | 2 +- dspace-iiif/pom.xml | 2 +- dspace-oai/pom.xml | 2 +- dspace-rdf/pom.xml | 2 +- dspace-rest/pom.xml | 4 ++-- dspace-server-webapp/pom.xml | 2 +- dspace-services/pom.xml | 2 +- dspace-sword/pom.xml | 2 +- dspace-swordv2/pom.xml | 2 +- dspace/modules/additions/pom.xml | 2 +- dspace/modules/pom.xml | 2 +- dspace/modules/rest/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- dspace/pom.xml | 2 +- pom.xml | 32 ++++++++++++++++---------------- 15 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index f92f485c1e70..aae4b282b654 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index 9a2da1377f9b..08c2fe062265 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 46c3f39bcc2a..f3b7e23bf4e5 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. 
diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index d36c9d236ec9..f5acab4dfc70 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index b17b22057943..e1ade345ba72 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index e20d92f247e1..a2add8447d8b 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index eab37034a510..2caba151f403 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index 460c1d73c9c5..716361a5406c 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index ab9d6cf2f26b..8393d311b4ca 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 4eb482d9e93a..9dae87e503ba 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index f9734ad224f3..6e9daa881871 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 ../../pom.xml diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index 61c1fb16d774..fa3083bda8d7 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index 9cddbeb2a309..eeb283d96ea2 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -13,7 +13,7 @@ just adding new jar in the classloader modules org.dspace - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 .. 
diff --git a/dspace/pom.xml b/dspace/pom.xml index df9f33f58021..8e9226149995 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -16,7 +16,7 @@ org.dspace dspace-parent - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 ../pom.xml diff --git a/pom.xml b/pom.xml index c3ad6e805230..a06395b3c0b3 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 DSpace Parent Project DSpace-CRIS is an open source extension of DSpace (http://www.dspace.org) providing out of box support for the CRIS / RIMS and moder Institution Repository use cases with advanced features and optimized configurations @@ -958,14 +958,14 @@ org.dspace dspace-rest - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 jar classes org.dspace dspace-rest - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 war @@ -1116,69 +1116,69 @@ org.dspace dspace-api - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 org.dspace dspace-api test-jar - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 test org.dspace.modules additions - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 org.dspace dspace-sword - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 org.dspace dspace-swordv2 - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 org.dspace dspace-oai - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 org.dspace dspace-services - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 org.dspace dspace-server-webapp test-jar - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 test org.dspace dspace-rdf - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 org.dspace dspace-iiif - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 org.dspace dspace-server-webapp - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 jar classes org.dspace dspace-server-webapp - cris-2023.02.00-SNAPSHOT + cris-2023.02.00 war @@ -2024,7 +2024,7 @@ scm:git:git@github.com:4Science/DSpace.git scm:git:git@github.com:4Science/DSpace.git git@github.com:4Science/DSpace.git - dspace-cris-2022.02.00 + dspace-cris-2023.02.00 From a5af9b33a80a84a2082710b4a16ab2407ba9266a Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Thu, 30 Nov 2023 17:52:08 +0100 Subject: [PATCH 619/686] [maven-release-plugin] prepare for next development iteration --- dspace-api/pom.xml | 2 +- dspace-iiif/pom.xml | 2 +- dspace-oai/pom.xml | 2 +- dspace-rdf/pom.xml | 2 +- dspace-rest/pom.xml | 4 ++-- dspace-server-webapp/pom.xml | 2 +- dspace-services/pom.xml | 2 +- dspace-sword/pom.xml | 2 +- dspace-swordv2/pom.xml | 2 +- dspace/modules/additions/pom.xml | 2 +- dspace/modules/pom.xml | 2 +- dspace/modules/rest/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- dspace/pom.xml | 2 +- pom.xml | 32 ++++++++++++++++---------------- 15 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index aae4b282b654..530c30d52c83 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index 08c2fe062265..a3de08e3d9cf 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index f3b7e23bf4e5..f2489890d233 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. 
diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index f5acab4dfc70..d2160ac9ccf5 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index e1ade345ba72..f521f08a1411 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index a2add8447d8b..5cc96e573686 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 2caba151f403..49f16408277d 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index 716361a5406c..bdd807db8185 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 8393d311b4ca..2fa0056ba7b3 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 9dae87e503ba..a8e149a66036 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index 6e9daa881871..1ee925c3a30f 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT ../../pom.xml diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index fa3083bda8d7..cbcd4a53d86b 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index eeb283d96ea2..1f1409094b56 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -13,7 +13,7 @@ just adding new jar in the classloader modules org.dspace - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. 
diff --git a/dspace/pom.xml b/dspace/pom.xml index 8e9226149995..5192a9c1c8e9 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -16,7 +16,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index a06395b3c0b3..4c3a4635dd33 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT DSpace Parent Project DSpace-CRIS is an open source extension of DSpace (http://www.dspace.org) providing out of box support for the CRIS / RIMS and moder Institution Repository use cases with advanced features and optimized configurations @@ -958,14 +958,14 @@ org.dspace dspace-rest - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT jar classes org.dspace dspace-rest - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT war @@ -1116,69 +1116,69 @@ org.dspace dspace-api - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-api test-jar - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT test org.dspace.modules additions - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-sword - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-swordv2 - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-oai - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-services - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-server-webapp test-jar - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT test org.dspace dspace-rdf - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-iiif - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-server-webapp - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT jar classes org.dspace dspace-server-webapp - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT war @@ -2024,7 +2024,7 @@ scm:git:git@github.com:4Science/DSpace.git scm:git:git@github.com:4Science/DSpace.git git@github.com:4Science/DSpace.git - dspace-cris-2023.02.00 + dspace-cris-2022.02.00 From 780b3c235bd46f59efed109f41c4f67991174e2f Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Thu, 19 Oct 2023 12:26:41 +0200 Subject: [PATCH 620/686] [DSC-1307] Fix integration tests (REST) --- .../ReciprocalItemAuthorityConsumer.java | 35 +- .../service/BulkImportWorkbookBuilderIT.java | 21 +- .../ReciprocalItemAuthorityConsumerIT.java | 527 ++++++++++-------- .../PlainMetadataSignatureGeneratorIT.java | 12 +- ...iscoveryRestControllerMultiLanguageIT.java | 466 ++++++++-------- .../dspace/app/rest/PatchWithAuthorityIT.java | 50 +- .../app/rest/VocabularyRestRepositoryIT.java | 308 +++++----- .../org/dspace/authority/CrisConsumerIT.java | 92 +-- 8 files changed, 837 insertions(+), 674 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java b/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java index faab946daa18..05f4e8aea3fa 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumer.java @@ -41,29 +41,18 @@ public class ReciprocalItemAuthorityConsumer implements Consumer { private static final Logger log = LogManager.getLogger(ReciprocalItemAuthorityConsumer.class); - private final Map reciprocalMetadata = new ConcurrentHashMap<>(); + private final ConfigurationService configurationService = new DSpace().getConfigurationService(); + private final ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private final Map 
reciprocalMetadataMap = new ConcurrentHashMap<>(); private final transient Set processedHandles = new HashSet<>(); private final IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName(IndexingService.class.getName(), IndexingService.class); - private final ItemService itemService; - - public ReciprocalItemAuthorityConsumer() { - ConfigurationService confService = new DSpace().getConfigurationService(); - itemService = ContentServiceFactory.getInstance().getItemService(); - for (String conf : confService.getPropertyKeys("ItemAuthority.reciprocalMetadata")) { - reciprocalMetadata.put(conf.substring("ItemAuthority.reciprocalMetadata.".length()), - confService.getProperty(conf)); - reciprocalMetadata.put(confService.getProperty(conf), - conf.substring("ItemAuthority.reciprocalMetadata.".length())); - } - } - @Override public void initialize() throws Exception { - // nothing + iniReciprocalMetadata(); } @Override @@ -79,11 +68,11 @@ public void consume(Context ctx, Event event) throws Exception { } else { processedHandles.add(item.getID()); } - if (!reciprocalMetadata.isEmpty()) { - for (String k : reciprocalMetadata.keySet()) { + if (!reciprocalMetadataMap.isEmpty()) { + for (String k : reciprocalMetadataMap.keySet()) { String entityType = k.split("\\.", 2)[0]; String metadata = k.split("\\.", 2)[1]; - checkItemRefs(ctx, item, entityType, metadata, reciprocalMetadata.get(k)); + checkItemRefs(ctx, item, entityType, metadata, reciprocalMetadataMap.get(k)); } } } finally { @@ -153,6 +142,16 @@ private void reindexItem(Context ctx, Item target) throws SQLException { } } + private void iniReciprocalMetadata() { + List properties = configurationService.getPropertyKeys("ItemAuthority.reciprocalMetadata"); + for (String conf : properties) { + reciprocalMetadataMap.put(conf.substring("ItemAuthority.reciprocalMetadata.".length()), + configurationService.getProperty(conf)); + reciprocalMetadataMap.put(configurationService.getProperty(conf), + conf.substring("ItemAuthority.reciprocalMetadata.".length())); + } + } + @Override public void end(Context ctx) throws Exception { processedHandles.clear(); diff --git a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java index a76642790704..a7006b6a8a10 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java @@ -128,6 +128,14 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { String authorId = author.getID().toString(); + Item testUser = ItemBuilder.createItem(context, persons) + .withTitle("Test User") + .build(); + + Item jesse = ItemBuilder.createItem(context, persons) + .withTitle("Jesse Pinkman") + .build(); + context.restoreAuthSystemState(); List metadata = new ArrayList<>(); @@ -153,9 +161,11 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { metadata.add(new MetadataValueDTO("dc", "date", "issued", "2022/02/15")); metadata.add(new MetadataValueDTO("dc", "type", null, "Book")); metadata.add(new MetadataValueDTO("dc", "language", "iso", "it")); - metadata.add(new MetadataValueDTO("dc", "contributor", "author", "Jesse Pinkman")); + metadata.add(new MetadataValueDTO("dc", "contributor", "author", null, "Jesse Pinkman", + jesse.getID().toString(), 600)); metadata.add(new MetadataValueDTO("oairecerif", 
"author", "affiliation", PLACEHOLDER_PARENT_METADATA_VALUE)); - metadata.add(new MetadataValueDTO("dc", "contributor", "author", "Test User")); + metadata.add(new MetadataValueDTO("dc", "contributor", "author", null, "Test User", + testUser.getID().toString(), 600)); metadata.add(new MetadataValueDTO("oairecerif", "author", "affiliation", "Company")); bitstreams = new ArrayList(); @@ -224,10 +234,11 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { with("dspace.entity.type", "Publication"), with("dc.type", "Book"), with("dc.language.iso", "it"), - with("dc.contributor.author", "Jesse Pinkman"), - with("dc.contributor.author", "Test User", 1), + with("dc.contributor.author", "Jesse Pinkman", jesse.getID().toString(), 600), + with("dc.contributor.author", "Test User", testUser.getID().toString(), 1, 600), with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE), - with("oairecerif.author.affiliation", "Company", 1))); + with("oairecerif.author.affiliation", "Company", 1) + )); assertThat(getItemBitstreamsByBundle(secondItem, "ORIGINAL"), contains( bitstreamWith("Bitstream 3", "Third bitstream content"))); diff --git a/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java b/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java index 136a1be05239..352be9f2eede 100644 --- a/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java @@ -7,7 +7,9 @@ */ package org.dspace.content.authority; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.UUID; import org.apache.solr.client.solrj.SolrQuery; @@ -22,11 +24,18 @@ import org.dspace.content.Item; import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataValue; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.MetadataAuthorityService; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.discovery.MockSolrSearchCore; +import org.dspace.event.ConsumerProfile; +import org.dspace.event.Dispatcher; +import org.dspace.event.factory.EventServiceFactory; +import org.dspace.event.service.EventService; import org.dspace.kernel.ServiceManager; +import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.junit.Assert; import org.junit.Before; @@ -38,6 +47,12 @@ public class ReciprocalItemAuthorityConsumerIT extends AbstractIntegrationTestWi private MockSolrSearchCore searchService; + private ConfigurationService configurationService; + + private MetadataAuthorityService metadataAuthorityService; + + private EventService eventService; + @Override @Before public void setUp() throws Exception { @@ -46,6 +61,19 @@ public void setUp() throws Exception { ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); searchService = serviceManager.getServiceByName(null, MockSolrSearchCore.class); + configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); + metadataAuthorityService = ContentAuthorityServiceFactory.getInstance() + .getMetadataAuthorityService(); + eventService = EventServiceFactory.getInstance().getEventService() + + 
configurationService.setProperty("ItemAuthority.reciprocalMetadata.Publication.dc.relation.product", + "dc.relation.publication"); + configurationService.setProperty("ItemAuthority.reciprocalMetadata.Product.dc.relation.publication", + "dc.relation.product"); + metadataAuthorityService.clearCache(); + + initializeReciprocalConfiguration(); parentCommunity = CommunityBuilder.createCommunity(context) .withName("Parent Community") @@ -54,254 +82,309 @@ public void setUp() throws Exception { @Test public void testShouldCreatePublicationMetadataForProductItem() throws Exception { - String productTitle = "productTitle"; - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, productTitle) - .withType("product") - .build(); - - Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("publication") - .withName("test_collection").build(); - Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "publicationTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", - "product", null, productTitle, productItem.getID().toString(), Choices.CF_ACCEPTED) - .withType("publication") - .build(); - - List metadataValues = itemService.getMetadataByMetadataString( - productItem, "dc.relation.publication"); - - Assert.assertEquals(1, metadataValues.size()); - Assert.assertNotNull(metadataValues.get(0)); - Assert.assertEquals(publicationItem.getID().toString(), metadataValues.get(0).getAuthority()); - Assert.assertEquals(publicationItem.getName(), metadataValues.get(0).getValue()); - - SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); - Assert.assertEquals(1, solrDocumentList.size()); - SolrDocument solrDoc = solrDocumentList.get(0); - - List publicationTitles = (List) solrDoc.get("dc.relation.publication"); - Assert.assertEquals(1, publicationTitles.size()); - Assert.assertEquals(publicationItem.getName(), publicationTitles.get(0)); - - List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); - Assert.assertEquals(1, publicationAuthorities.size()); - Assert.assertEquals(publicationItem.getID().toString(), publicationAuthorities.get(0)); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + String productTitle = "productTitle"; + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, productTitle) + .withType("product") + .build(); + + Collection 
publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("publication") + .withName("test_collection").build(); + Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "publicationTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", + "product", null, productTitle, productItem.getID().toString(), Choices.CF_ACCEPTED) + .withType("publication") + .build(); + + List metadataValues = itemService.getMetadataByMetadataString( + productItem, "dc.relation.publication"); + + Assert.assertEquals(1, metadataValues.size()); + Assert.assertNotNull(metadataValues.get(0)); + Assert.assertEquals(publicationItem.getID().toString(), metadataValues.get(0).getAuthority()); + Assert.assertEquals(publicationItem.getName(), metadataValues.get(0).getValue()); + + SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List publicationTitles = (List) solrDoc.get("dc.relation.publication"); + Assert.assertEquals(1, publicationTitles.size()); + Assert.assertEquals(publicationItem.getName(), publicationTitles.get(0)); + + List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); + Assert.assertEquals(1, publicationAuthorities.size()); + Assert.assertEquals(publicationItem.getID().toString(), publicationAuthorities.get(0)); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void testShouldCreateProductMetadataForPublicationItem() throws Exception { - String publicationTitle = "publicationTitle"; - Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("publication") - .withName("test_collection").build(); - Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, publicationTitle) - .withType("publication") - .build(); - - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "publication", - null, publicationTitle, publicationItem.getID().toString(), Choices.CF_ACCEPTED) - .withType("product") - .build(); - - List metadataValues = itemService.getMetadataByMetadataString( - publicationItem, "dc.relation.product"); - - Assert.assertEquals(1, metadataValues.size()); - Assert.assertNotNull(metadataValues.get(0)); - Assert.assertEquals(productItem.getID().toString(), metadataValues.get(0).getAuthority()); - Assert.assertEquals(productItem.getName(), 
metadataValues.get(0).getValue()); - - SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); - Assert.assertEquals(1, solrDocumentList.size()); - SolrDocument solrDoc = solrDocumentList.get(0); - - List productTitles = (List) solrDoc.get("dc.relation.product"); - Assert.assertEquals(1, productTitles.size()); - Assert.assertEquals(productItem.getName(), productTitles.get(0)); - - List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); - Assert.assertEquals(1, productAuthorities.size()); - Assert.assertEquals(productItem.getID().toString(), productAuthorities.get(0)); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + String publicationTitle = "publicationTitle"; + Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("publication") + .withName("test_collection").build(); + Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, publicationTitle) + .withType("publication") + .build(); + + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "publication", + null, publicationTitle, publicationItem.getID().toString(), Choices.CF_ACCEPTED) + .withType("product") + .build(); + + List metadataValues = itemService.getMetadataByMetadataString( + publicationItem, "dc.relation.product"); + + Assert.assertEquals(1, metadataValues.size()); + Assert.assertNotNull(metadataValues.get(0)); + Assert.assertEquals(productItem.getID().toString(), metadataValues.get(0).getAuthority()); + Assert.assertEquals(productItem.getName(), metadataValues.get(0).getValue()); + + SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List productTitles = (List) solrDoc.get("dc.relation.product"); + Assert.assertEquals(1, productTitles.size()); + Assert.assertEquals(productItem.getName(), productTitles.get(0)); + + List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); + Assert.assertEquals(1, productAuthorities.size()); + Assert.assertEquals(productItem.getID().toString(), productAuthorities.get(0)); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void testItemMentioningNotExistingAuthorityIsCreated() throws Exception { - UUID notExistingItemId = UUID.fromString("803762b5-6f73-4870-b941-adf3c5626f04"); - Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("publication") - .withName("test_collection").build(); - Item publicationItem = ItemBuilder.createItem(context, 
publicationItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "publicationTitle") - .withType("publication") - .build(); - - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "product", - null, "notExistingPublicationTitle", notExistingItemId.toString(), Choices.CF_ACCEPTED) - .withType("product") - .build(); - - List metadataValues = itemService.getMetadataByMetadataString( - publicationItem, "dc.relation.product"); - Assert.assertEquals(0, metadataValues.size()); - - SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); - Assert.assertEquals(1, solrDocumentList.size()); - SolrDocument solrDoc = solrDocumentList.get(0); - - List productTitles = (List) solrDoc.get("dc.relation.product"); - Assert.assertNull(productTitles); - - List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); - Assert.assertNull(productAuthorities); - - Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); - Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + + UUID notExistingItemId = UUID.fromString("803762b5-6f73-4870-b941-adf3c5626f04"); + Collection publicationItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("publication") + .withName("test_collection").build(); + Item publicationItem = ItemBuilder.createItem(context, publicationItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "publicationTitle") + .withType("publication") + .build(); + + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "product", + null, "notExistingPublicationTitle", notExistingItemId.toString(), Choices.CF_ACCEPTED) + .withType("product") + .build(); + + List metadataValues = itemService.getMetadataByMetadataString( + publicationItem, "dc.relation.product"); + Assert.assertEquals(0, metadataValues.size()); + + SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List productTitles = (List) 
solrDoc.get("dc.relation.product"); + Assert.assertNull(productTitles); + + List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); + Assert.assertNull(productAuthorities); + + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); + Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void testItemMentioningInvalidAuthorityIsCreated() throws Exception { - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "product", - null, "notExistingPublicationTitle", "invalidAuthorityUUID", Choices.CF_ACCEPTED) - .withType("product") - .build(); - - SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); - Assert.assertEquals(1, solrDocumentList.size()); - SolrDocument solrDoc = solrDocumentList.get(0); - - List publicationTitles = (List) solrDoc.get("dc.relation.publication"); - Assert.assertNull(publicationTitles); - - List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); - Assert.assertNull(publicationAuthorities); - - Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); - Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "product", + null, "notExistingPublicationTitle", "invalidAuthorityUUID", Choices.CF_ACCEPTED) + .withType("product") + .build(); + + SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List publicationTitles = (List) solrDoc.get("dc.relation.publication"); + Assert.assertNull(publicationTitles); + + List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); + Assert.assertNull(publicationAuthorities); + + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); + Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void testItemWithoutAuthorityIsCreated() throws Exception { - String publicationTitle = 
"publicationTitle"; - Collection publicatoinItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("publication") - .withName("test_collection").build(); - Item publicationItem = ItemBuilder.createItem(context, publicatoinItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, publicationTitle) - .withType("publication") - .build(); - - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") - .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "publication", publicationTitle) - .withType("product") - .build(); - - List metadataValues = itemService.getMetadataByMetadataString( - publicationItem, "dc.relation.product"); - Assert.assertEquals(0, metadataValues.size()); - - SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); - Assert.assertEquals(1, solrDocumentList.size()); - SolrDocument solrDoc = solrDocumentList.get(0); - - List productTitles = (List) solrDoc.get("dc.relation.product"); - Assert.assertNull(productTitles); - - List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); - Assert.assertNull(productAuthorities); - - Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); - Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + String publicationTitle = "publicationTitle"; + Collection publicatoinItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("publication") + .withName("test_collection").build(); + Item publicationItem = ItemBuilder.createItem(context, publicatoinItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, publicationTitle) + .withType("publication") + .build(); + + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withMetadata(MetadataSchemaEnum.DC.getName(), "relation", "publication", publicationTitle) + .withType("product") + .build(); + + List metadataValues = itemService.getMetadataByMetadataString( + publicationItem, "dc.relation.product"); + Assert.assertEquals(0, metadataValues.size()); + + SolrDocumentList solrDocumentList = getSolrDocumentList(publicationItem); + Assert.assertEquals(1, solrDocumentList.size()); + 
SolrDocument solrDoc = solrDocumentList.get(0); + + List productTitles = (List) solrDoc.get("dc.relation.product"); + Assert.assertNull(productTitles); + + List productAuthorities = (List) solrDoc.get("dc.relation.product_authority"); + Assert.assertNull(productAuthorities); + + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); + Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void testItemWithoutPublicationMetadataIsCreated() throws Exception { - Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("product") - .withName("test_collection").build(); - Item productItem = ItemBuilder.createItem(context, productItemCollection) - .withPersonIdentifierFirstName("test_first_name") - .withPersonIdentifierLastName("test_second_name") - .withScopusAuthorIdentifier("test_author_identifier") - .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") - .withType("product") - .build(); - - List productItemMetadataValues = itemService.getMetadataByMetadataString( - productItem, "dc.relation.publication"); - Assert.assertEquals(0, productItemMetadataValues.size()); - - SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); - Assert.assertEquals(1, solrDocumentList.size()); - SolrDocument solrDoc = solrDocumentList.get(0); - - List publicationTitles = (List) solrDoc.get("dc.relation.publication"); - Assert.assertNull(publicationTitles); - - List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); - Assert.assertNull(publicationAuthorities); - - Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); - Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + try { + configurationService.setProperty("authority.controlled.dc.relation.product", "true"); + metadataAuthorityService.clearCache(); + + Collection productItemCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("product") + .withName("test_collection").build(); + Item productItem = ItemBuilder.createItem(context, productItemCollection) + .withPersonIdentifierFirstName("test_first_name") + .withPersonIdentifierLastName("test_second_name") + .withScopusAuthorIdentifier("test_author_identifier") + .withMetadata(MetadataSchemaEnum.DC.getName(), "title", null, "productTitle") + .withType("product") + .build(); + + List productItemMetadataValues = itemService.getMetadataByMetadataString( + productItem, "dc.relation.publication"); + Assert.assertEquals(0, productItemMetadataValues.size()); + + SolrDocumentList solrDocumentList = getSolrDocumentList(productItem); + Assert.assertEquals(1, solrDocumentList.size()); + SolrDocument solrDoc = solrDocumentList.get(0); + + List publicationTitles = (List) solrDoc.get("dc.relation.publication"); + Assert.assertNull(publicationTitles); + + List publicationAuthorities = (List) solrDoc.get("dc.relation.publication_authority"); + Assert.assertNull(publicationAuthorities); + + Item foundProductItem = itemService.findByIdOrLegacyId(new Context(), productItem.getID().toString()); + Assert.assertEquals(productItem.getID(), foundProductItem.getID()); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.product", "false"); + 
metadataAuthorityService.clearCache(); + } } - public SolrDocumentList getSolrDocumentList(Item item) throws Exception { + private SolrDocumentList getSolrDocumentList(Item item) throws Exception { SolrQuery solrQuery = new SolrQuery(); solrQuery.setQuery("search.resourceid:" + item.getID()); QueryResponse queryResponse = searchService.getSolr().query(solrQuery); return queryResponse.getResults(); } + private void initializeReciprocalConfiguration() throws Exception { + Dispatcher dispatcher = eventService.getDispatcher("default"); + Object object = dispatcher.getConsumers(); + if (object instanceof Map) { + Map consumers = (LinkedHashMap) dispatcher.getConsumers(); + + ConsumerProfile consumerProfile = consumers.get("reciprocal"); + consumerProfile.getConsumer().initialize(); + } + } } \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java index af50c5d3facb..045b10d701bd 100644 --- a/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java @@ -134,11 +134,15 @@ public void testSignatureGenerationWithSingleMetadataValue() { public void testSignatureGenerationWithManyEqualsMetadataValues() { context.turnOffAuthorisationSystem(); + Item person = ItemBuilder.createItem(context, collection) + .withTitle("Jesse Pinkman") + .build(); + Item item = ItemBuilder.createItem(context, collection) .withTitle("Item title") .withDescription("Description") - .withAuthor("Jesse Pinkman") - .withAuthor("Jesse Pinkman") + .withAuthor("Jesse Pinkman", person.getID().toString()) + .withAuthor("Jesse Pinkman", person.getID().toString()) .build(); context.restoreAuthSystemState(); @@ -146,12 +150,12 @@ public void testSignatureGenerationWithManyEqualsMetadataValues() { MetadataValue firstAuthor = getMetadata(item, "dc.contributor.author", 0); String firstSignature = generator.generate(context, List.of(firstAuthor)); assertThat(firstSignature, notNullValue()); - assertThat(firstSignature, equalTo("dc.contributor.author::Jesse Pinkman")); + assertThat(firstSignature, equalTo("dc.contributor.author::Jesse Pinkman::" + person.getID().toString())); MetadataValue secondAuthor = getMetadata(item, "dc.contributor.author", 1); String secondSignature = generator.generate(context, List.of(secondAuthor)); assertThat(secondSignature, notNullValue()); - assertThat(secondSignature, equalTo("dc.contributor.author::Jesse Pinkman")); + assertThat(secondSignature, equalTo("dc.contributor.author::Jesse Pinkman::" + person.getID().toString())); List metadataValues = generator.findBySignature(context, item, firstSignature); assertThat(metadataValues, hasSize(1)); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java index 0299593830ee..aab6da9a5cec 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java @@ -281,261 +281,257 @@ public void discoverFacetsLanguageWithPrefixTest() throws Exception { public void discoverFacetsTypesTest() throws Exception { context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en","uk", 
"it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - .withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType( - "Resource Types::text::journal::journal article::software paper", - "publication-coar-types:c_7bab" - ) - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "art")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", - containsInAnyOrder( - FacetValueMatcher.entryTypes( - "articolo sul software","publication-coar-types:c_7bab" - ) - ) - )); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "про")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", - containsInAnyOrder( - FacetValueMatcher.entryTypes( - "програмна стаття", - "publication-coar-types:c_7bab" - ) - ) - )); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + .withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA", "srsc:SCB14") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", "matem")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "мат")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } 
finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void discoverFacetsTypesTestWithoutAuthority() throws Exception { context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en","uk", "it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - .withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA") - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA")))); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA")))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + .withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + 
.andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void discoverFacetsTypesTestWithUnknownAuthority() throws Exception { context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en","uk", "it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - .withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA", "srsc:UNKNOWN") - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "srsc:UNKNOWN")))); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "srsc:UNKNOWN")))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + .withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA", "srsc:UNKNOWN") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + 
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void discoverFacetsTypesTestWithUnknownAuthorityName() throws Exception { - context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en","uk", "it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - .withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA", "UNKNOWN:VALUE") - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "UNKNOWN:VALUE")))); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "UNKNOWN:VALUE")))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + .withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA", "UNKNOWN:VALUE") + .build(); + + context.restoreAuthSystemState(); + + 
getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void discoverFacetsTypesTestWithWrongAuthorityFormat() throws Exception { context.turnOffAuthorisationSystem(); - String[] supportedLanguage = { "en", "uk", "it" }; - configurationService.setProperty("webui.supported.locales", supportedLanguage); - metadataAuthorityService.clearCache(); - choiceAuthorityService.clearCache(); - - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - - Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") - .withName("Collection 1") - .withEntityType("Publication") - .build(); - ItemBuilder.createItem(context, col1) - .withTitle("Test 1") - .withIssueDate("2010-10-17") - .withAuthor("Testing, Works") - .withType("Research Subject Categories::MATEMATICA", "authority") - .build(); - - context.restoreAuthSystemState(); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", Locale.ITALIAN.getLanguage()) - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "authority")))); - - getClient().perform(get("/api/discover/facets/types") - .header("Accept-Language", "uk") - .param("configuration", "multilanguage-types") - .param("prefix", "research")) - .andExpect(jsonPath("$.type", is("discover"))) - .andExpect(jsonPath("$.name", is("types"))) - .andExpect(jsonPath("$.facetType", is("text"))) - .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))) - .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryTypes("Research Subject Categories::MATEMATICA", "authority")))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String[] supportedLanguage = {"en", "uk", "it"}; + configurationService.setProperty("webui.supported.locales", supportedLanguage); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity, "123456789/language-test-1") + .withName("Collection 1") + .withEntityType("Publication") + .build(); + + 
ItemBuilder.createItem(context, col1) + .withTitle("Test 1") + .withIssueDate("2010-10-17") + .withAuthor("Testing, Works") + .withType("Research Subject Categories::MATEMATICA", "authority") + .build(); + + context.restoreAuthSystemState(); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", Locale.ITALIAN.getLanguage()) + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + + getClient().perform(get("/api/discover/facets/types") + .header("Accept-Language", "uk") + .param("configuration", "multilanguage-types") + .param("prefix", "research")) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$.name", is("types"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets/types"))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } -} +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchWithAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchWithAuthorityIT.java index 21d48c1f2309..ddda2c97cc57 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchWithAuthorityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PatchWithAuthorityIT.java @@ -28,6 +28,10 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.MetadataAuthorityService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.junit.Test; /** @@ -40,6 +44,12 @@ public class PatchWithAuthorityIT extends AbstractControllerIntegrationTest { private WorkspaceItem workspaceItem; + private final ConfigurationService configurationService = DSpaceServicesFactory + .getInstance().getConfigurationService(); + + private final MetadataAuthorityService metadataAuthorityService = ContentAuthorityServiceFactory + .getInstance().getMetadataAuthorityService(); + @Override public void setUp() throws Exception { super.setUp(); @@ -62,22 +72,30 @@ public void setUp() throws Exception { public void addValueFromControlledVocabularyHasAuthorityStored() throws Exception { String authToken = getAuthToken(admin.getEmail(), password); - MetadataValueRest value = new MetadataValueRest("dataset"); - value.setAuthority("c_ddb1"); - value.setConfidence(600); - List operations = - singletonList(new AddOperation("/sections/publication/dc.type", - singletonList(value))); - - getClient(authToken).perform(patch("/api/submission/workspaceitems/" + workspaceItem.getID()) - .contentType(MediaType.APPLICATION_JSON) - .content(getPatchContent(operations))) - .andExpect(status().isOk()); - - Item item = context.reloadEntity(workspaceItem).getItem(); - - assertThat(item.getMetadata(), hasItem(with("dc.type", "dataset", null, - "c_ddb1", 0, 600))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + MetadataValueRest value = new MetadataValueRest("dataset"); + value.setAuthority("c_ddb1"); + value.setConfidence(600); + List operations = + singletonList(new 
AddOperation("/sections/publication/dc.type", + singletonList(value))); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + workspaceItem.getID()) + .contentType(MediaType.APPLICATION_JSON) + .content(getPatchContent(operations))) + .andExpect(status().isOk()); + + Item item = context.reloadEntity(workspaceItem).getItem(); + + assertThat(item.getMetadata(), hasItem(with("dc.type", "dataset", null, + "c_ddb1", 0, 600))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/VocabularyRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/VocabularyRestRepositoryIT.java index 1ff29a2ba228..1eedda87f990 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/VocabularyRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/VocabularyRestRepositoryIT.java @@ -40,6 +40,7 @@ import org.dspace.content.Item; import org.dspace.content.authority.DCInputAuthority; import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.dspace.content.authority.service.MetadataAuthorityService; import org.dspace.content.edit.EditItem; import org.dspace.core.service.PluginService; import org.dspace.services.ConfigurationService; @@ -58,6 +59,9 @@ public class VocabularyRestRepositoryIT extends AbstractControllerIntegrationTes @Autowired private ConfigurationService configurationService; + @Autowired + private MetadataAuthorityService metadataAuthorityService; + @Autowired private SubmissionFormRestRepository submissionFormRestRepository; @@ -149,22 +153,22 @@ public void destroy() throws Exception { public void findAllTest() throws Exception { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(get("/api/submission/vocabularies")) - .andExpect(status().isOk()) + .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.vocabularies", Matchers.containsInAnyOrder( VocabularyMatcher.matchProperties("srsc", "srsc", false, true), + VocabularyMatcher.matchProperties("common_iso_languages", "common_iso_languages", true, false), + VocabularyMatcher.matchProperties("SRPublisher", "SRPublisher", false, false), + VocabularyMatcher.matchProperties("patent_types", "patent_types", true, false), + VocabularyMatcher.matchProperties("types", "types", false, true), + VocabularyMatcher.matchProperties("gender", "gender", true, false), + VocabularyMatcher.matchProperties("SolrAuthorAuthority", "SolrAuthorAuthority", false, false), + VocabularyMatcher.matchProperties("SRJournalTitle", "SRJournalTitle", false, false), VocabularyMatcher.matchProperties("common_types", "common_types", true, false), - VocabularyMatcher.matchProperties("common_iso_languages", "common_iso_languages", true , false), - VocabularyMatcher.matchProperties("SolrAuthorAuthority", "SolrAuthorAuthority", false , false), - VocabularyMatcher.matchProperties("patent_types", "patent_types", true , false), - VocabularyMatcher.matchProperties("types", "types", false , true), - VocabularyMatcher.matchProperties("gender", "gender", true , false), - VocabularyMatcher.matchProperties("SRPublisher", "SRPublisher", false , false), - VocabularyMatcher.matchProperties("SRJournalTitle", "SRJournalTitle", false , false), - VocabularyMatcher.matchProperties("publication-coar-types", "publication-coar-types", false , true) - ))) - .andExpect(jsonPath("$._links.self.href", - 
Matchers.containsString("api/submission/vocabularies"))) - .andExpect(jsonPath("$.page.totalElements", is(10))); + VocabularyMatcher.matchProperties("publication-coar-types", "publication-coar-types", false, true) + ))) + .andExpect(jsonPath("$._links.self.href", + Matchers.containsString("api/submission/vocabularies"))) + .andExpect(jsonPath("$.page.totalElements", is(10))); } @Test @@ -506,140 +510,160 @@ public void linkedEntitiesWithFilterAndEntryIdTest() throws Exception { @Test public void controlledVocabularyWithHierarchyStoreSetTrueTest() throws Exception { context.turnOffAuthorisationSystem(); - String vocabularyName = "publication-coar-types"; - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Root Community") - .build(); - - Collection col = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("Publication") - .withName("Collection 1") - .build(); - - Item itemA = ItemBuilder.createItem(context, col) - .withTitle("Test Item A") - .withIssueDate("2023-04-04") - .withType("Resource Types::text::book::book part", vocabularyName + ":c_3248") - .build(); - - EditItem editItem = new EditItem(context, itemA); - - context.restoreAuthSystemState(); - - String tokenAdmin = getAuthToken(admin.getEmail(), password); - - getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.metadata", Matchers.allOf( - hasJsonPath("$['dc.title'][0].value", is("Test Item A")), - hasJsonPath("$['dc.type'][0].value", is("Resource Types::text::book::book part")), - hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_3248")), - hasJsonPath("$['dc.type'][0].confidence", is(600)) - ))); - - AtomicReference selectedLeafValue = new AtomicReference<>(); - AtomicReference selectedLeafauthority = new AtomicReference<>(); - - getClient(tokenAdmin).perform(get("/api/submission/vocabularies/" + vocabularyName + "/entries") - .param("metadata", "dc.type") - .param("entryID", vocabularyName + ":c_b239")) - .andExpect(status().isOk()) - .andDo(result -> selectedLeafValue.set(read(result.getResponse().getContentAsString(), - "$._embedded.entries[0].value"))) - .andDo(result -> selectedLeafauthority.set(read(result.getResponse().getContentAsString(), - "$._embedded.entries[0].authority"))); - - List operations = new ArrayList(); - Map value = new HashMap(); - value.put("value", selectedLeafValue.get()); - value.put("authority", selectedLeafauthority.get()); - value.put("confidence", "600"); - operations.add(new ReplaceOperation("/sections/controlled-vocabulary-test/dc.type/0", value)); - - String patchBody = getPatchContent(operations); - getClient(tokenAdmin).perform(patch("/api/core/edititems/" + editItem.getID() + ":MODE-VOC") - .content(patchBody) - .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) - .andExpect(status().isOk()); - - getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.metadata", Matchers.allOf( - hasJsonPath("$['dc.title'][0].value", is("Test Item A")), - hasJsonPath("$['dc.type'][0].value", is("text::journal::editorial")), - hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_b239")), - hasJsonPath("$['dc.type'][0].confidence", is(600)) - ))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String vocabularyName = "publication-coar-types"; + parentCommunity = CommunityBuilder.createCommunity(context) + 
.withName("Root Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Publication") + .withName("Collection 1") + .build(); + + Item itemA = ItemBuilder.createItem(context, col) + .withTitle("Test Item A") + .withIssueDate("2023-04-04") + .withType("Resource Types::text::book::book part", vocabularyName + ":c_3248") + .build(); + + EditItem editItem = new EditItem(context, itemA); + + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.metadata", Matchers.allOf( + hasJsonPath("$['dc.title'][0].value", is("Test Item A")), + hasJsonPath( + "$['dc.type'][0].value", is("Resource Types::text::book::book part")), + hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_3248")), + hasJsonPath("$['dc.type'][0].confidence", is(600)) + ))); + + AtomicReference selectedLeafValue = new AtomicReference<>(); + AtomicReference selectedLeafauthority = new AtomicReference<>(); + + getClient(tokenAdmin).perform(get("/api/submission/vocabularies/" + vocabularyName + "/entries") + .param("metadata", "dc.type") + .param("entryID", vocabularyName + ":c_b239")) + .andExpect(status().isOk()) + .andDo(result -> selectedLeafValue.set(read(result.getResponse().getContentAsString(), + "$._embedded.entries[0].value"))) + .andDo(result -> selectedLeafauthority.set( + read(result.getResponse().getContentAsString(), + "$._embedded.entries[0].authority"))); + + List operations = new ArrayList(); + Map value = new HashMap(); + value.put("value", selectedLeafValue.get()); + value.put("authority", selectedLeafauthority.get()); + value.put("confidence", "600"); + operations.add(new ReplaceOperation("/sections/controlled-vocabulary-test/dc.type/0", value)); + + String patchBody = getPatchContent(operations); + getClient(tokenAdmin).perform(patch("/api/core/edititems/" + editItem.getID() + ":MODE-VOC") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + + getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.metadata", Matchers.allOf( + hasJsonPath("$['dc.title'][0].value", is("Test Item A")), + hasJsonPath("$['dc.type'][0].value", is("text::journal::editorial")), + hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_b239")), + hasJsonPath("$['dc.type'][0].confidence", is(600)) + ))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test public void controlledVocabularyWithHierarchyStoreSetFalseTest() throws Exception { context.turnOffAuthorisationSystem(); - String vocabularyName = "publication-coar-types"; - configurationService.setProperty("vocabulary.plugin." 
+ vocabularyName + ".hierarchy.store", false); - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Root Community") - .build(); - - Collection col = CollectionBuilder.createCollection(context, parentCommunity) - .withEntityType("Publication") - .withName("Collection 1") - .build(); - - Item itemA = ItemBuilder.createItem(context, col) - .withTitle("Test Item A") - .withIssueDate("2023-04-04") - .withType("Resource Types::text::book::book part", vocabularyName + ":c_3248") - .build(); - - EditItem editItem = new EditItem(context, itemA); - - context.restoreAuthSystemState(); - - String tokenAdmin = getAuthToken(admin.getEmail(), password); - - getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.metadata", Matchers.allOf( - hasJsonPath("$['dc.title'][0].value", is("Test Item A")), - hasJsonPath("$['dc.type'][0].value", is("Resource Types::text::book::book part")), - hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_3248")), - hasJsonPath("$['dc.type'][0].confidence", is(600)) - ))); - - AtomicReference selectedLeafValue = new AtomicReference<>(); - AtomicReference selectedLeafauthority = new AtomicReference<>(); - - getClient(tokenAdmin).perform(get("/api/submission/vocabularies/" + vocabularyName + "/entries") - .param("metadata", "dc.type") - .param("entryID", vocabularyName + ":c_b239")) - .andExpect(status().isOk()) - .andDo(result -> selectedLeafValue.set(read(result.getResponse().getContentAsString(), - "$._embedded.entries[0].value"))) - .andDo(result -> selectedLeafauthority.set(read(result.getResponse().getContentAsString(), - "$._embedded.entries[0].authority"))); - - List operations = new ArrayList(); - Map value = new HashMap(); - value.put("value", selectedLeafValue.get()); - value.put("authority", selectedLeafauthority.get()); - value.put("confidence", "600"); - operations.add(new ReplaceOperation("/sections/controlled-vocabulary-test/dc.type/0", value)); - - String patchBody = getPatchContent(operations); - getClient(tokenAdmin).perform(patch("/api/core/edititems/" + editItem.getID() + ":MODE-VOC") - .content(patchBody) - .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) - .andExpect(status().isOk()); - - getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.metadata", Matchers.allOf( - hasJsonPath("$['dc.title'][0].value", is("Test Item A")), - hasJsonPath("$['dc.type'][0].value", is("editorial")), - hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_b239")), - hasJsonPath("$['dc.type'][0].confidence", is(600)) - ))); + try { + configurationService.setProperty("authority.controlled.dc.type", "true"); + metadataAuthorityService.clearCache(); + + String vocabularyName = "publication-coar-types"; + configurationService.setProperty("vocabulary.plugin." 
+ vocabularyName + ".hierarchy.store", false); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Root Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Publication") + .withName("Collection 1") + .build(); + + Item itemA = ItemBuilder.createItem(context, col) + .withTitle("Test Item A") + .withIssueDate("2023-04-04") + .withType("Resource Types::text::book::book part", vocabularyName + ":c_3248") + .build(); + + EditItem editItem = new EditItem(context, itemA); + + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.metadata", Matchers.allOf( + hasJsonPath("$['dc.title'][0].value", is("Test Item A")), + hasJsonPath("$['dc.type'][0].value", + is("Resource Types::text::book::book part")), + hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_3248")), + hasJsonPath("$['dc.type'][0].confidence", is(600)) + ))); + + AtomicReference selectedLeafValue = new AtomicReference<>(); + AtomicReference selectedLeafauthority = new AtomicReference<>(); + + getClient(tokenAdmin).perform(get("/api/submission/vocabularies/" + vocabularyName + "/entries") + .param("metadata", "dc.type") + .param("entryID", vocabularyName + ":c_b239")) + .andExpect(status().isOk()) + .andDo(result -> selectedLeafValue.set(read(result.getResponse().getContentAsString(), + "$._embedded.entries[0].value"))) + .andDo(result -> selectedLeafauthority.set( + read(result.getResponse().getContentAsString(), + "$._embedded.entries[0].authority"))); + + List operations = new ArrayList(); + Map value = new HashMap(); + value.put("value", selectedLeafValue.get()); + value.put("authority", selectedLeafauthority.get()); + value.put("confidence", "600"); + operations.add(new ReplaceOperation("/sections/controlled-vocabulary-test/dc.type/0", value)); + + String patchBody = getPatchContent(operations); + getClient(tokenAdmin).perform(patch("/api/core/edititems/" + editItem.getID() + ":MODE-VOC") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + + getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.metadata", Matchers.allOf( + hasJsonPath("$['dc.title'][0].value", is("Test Item A")), + hasJsonPath("$['dc.type'][0].value", is("editorial")), + hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_b239")), + hasJsonPath("$['dc.type'][0].confidence", is(600)) + ))); + } finally { + configurationService.setProperty("authority.controlled.dc.type", "false"); + metadataAuthorityService.clearCache(); + } } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java b/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java index 3cbbe6850dc9..2d887caa7b2c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/authority/CrisConsumerIT.java @@ -57,6 +57,8 @@ import org.dspace.content.Item; import org.dspace.content.MetadataValue; import org.dspace.content.WorkspaceItem; +import org.dspace.content.authority.ChoiceAuthorityServiceImpl; +import org.dspace.content.authority.service.MetadataAuthorityService; import org.dspace.content.service.ItemService; import org.dspace.eperson.EPerson; import 
org.dspace.external.OrcidRestConnector; @@ -90,6 +92,9 @@ public class CrisConsumerIT extends AbstractControllerIntegrationTest { @Autowired private ConfigurationService configurationService; + @Autowired + private ChoiceAuthorityServiceImpl choiceAuthorityService; + @Value("classpath:org/dspace/app/rest/simple-article.pdf") private Resource simpleArticle; @@ -111,6 +116,9 @@ public class CrisConsumerIT extends AbstractControllerIntegrationTest { @Autowired private OrcidV3AuthorDataProvider orcidV3AuthorDataProvider; + @Autowired + private MetadataAuthorityService metadataAuthorityService; + @Override public void setUp() throws Exception { super.setUp(); @@ -1127,54 +1135,74 @@ public void testOrcidImportFiller() throws Exception { @Test public void testSherpaImportFiller() throws Exception { - String issn = "2731-0582"; + try { + configurationService.setProperty("authority.controlled.dc.relation.journal", "true"); + configurationService.setProperty("choices.plugin.dc.relation.journal", "JournalAuthority"); + configurationService.setProperty("choices.presentation.dc.relation.journal", "suggest"); + configurationService.setProperty("choices.closed.dc.relation.journal", "true"); + configurationService.setProperty("cris.ItemAuthority.JournalAuthority.entityType", "Journal"); + configurationService.setProperty("cris.ItemAuthority.JournalAuthority.relationshipType", "Journal"); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); - context.turnOffAuthorisationSystem(); + String issn = "2731-0582"; - Collection journals = createCollection("Collection of journals", "Journal", subCommunity); + context.turnOffAuthorisationSystem(); - Item publication = ItemBuilder.createItem(context, publicationCollection) - .withTitle("Test Publication") - .withRelationJournal("Nature Synthesis", "will be generated::ISSN::" + issn) - .build(); + Collection journals = createCollection("Collection of journals", "Journal", subCommunity); - context.commit(); + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test Publication") + .withRelationJournal("Nature Synthesis", "will be generated::ISSN::" + issn) + .build(); - context.restoreAuthSystemState(); + context.commit(); - String authToken = getAuthToken(submitter.getEmail(), password); - ItemRest item = getItemViaRestByID(authToken, publication.getID()); + context.restoreAuthSystemState(); - MetadataValueRest journalMetadata = findSingleMetadata(item, "dc.relation.journal"); + String authToken = getAuthToken(submitter.getEmail(), password); + ItemRest item = getItemViaRestByID(authToken, publication.getID()); - UUID journalId = UUIDUtils.fromString(journalMetadata.getAuthority()); - assertThat(journalId, notNullValue()); + MetadataValueRest journalMetadata = findSingleMetadata(item, "dc.relation.journal"); - Item journal = itemService.find(context, journalId); - assertThat(journal, notNullValue()); - assertThat(journal.getOwningCollection(), is(journals)); - assertThat(journal.getMetadata(), hasItems( - with("dc.title", "Nature Synthesis"), - with("dc.identifier.issn", issn), - with("cris.sourceId", "ISSN::" + issn))); + UUID journalId = UUIDUtils.fromString(journalMetadata.getAuthority()); + assertThat(journalId, notNullValue()); - context.turnOffAuthorisationSystem(); + Item journal = itemService.find(context, journalId); + assertThat(journal, notNullValue()); + assertThat(journal.getOwningCollection(), is(journals)); + assertThat(journal.getMetadata(), hasItems( + with("dc.title", "Nature 
Synthesis"), + with("dc.identifier.issn", issn), + with("cris.sourceId", "ISSN::" + issn))); - publicationCollection = context.reloadEntity(publicationCollection); + context.turnOffAuthorisationSystem(); - Item anotherPublication = ItemBuilder.createItem(context, publicationCollection) - .withTitle("Test Publication 2") - .withRelationJournal("Nature Synthesis", "will be generated::ISSN::" + issn) - .build(); + publicationCollection = context.reloadEntity(publicationCollection); - context.commit(); + Item anotherPublication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test Publication 2") + .withRelationJournal("Nature Synthesis", "will be generated::ISSN::" + issn) + .build(); - context.restoreAuthSystemState(); + context.commit(); + + context.restoreAuthSystemState(); - item = getItemViaRestByID(authToken, anotherPublication.getID()); - journalMetadata = findSingleMetadata(item, "dc.relation.journal"); - assertThat(UUIDUtils.fromString(journalMetadata.getAuthority()), is(journal.getID())); + item = getItemViaRestByID(authToken, anotherPublication.getID()); + journalMetadata = findSingleMetadata(item, "dc.relation.journal"); + assertThat(UUIDUtils.fromString(journalMetadata.getAuthority()), is(journal.getID())); + } finally { + configurationService.setProperty("authority.controlled.dc.relation.journal", "false"); + configurationService.setProperty("choices.plugin.dc.relation.journal", null); + configurationService.setProperty("choices.presentation.dc.relation.journal", null); + configurationService.setProperty("choices.closed.dc.relation.journal", null); + configurationService.setProperty("cris.ItemAuthority.JournalAuthority.entityType", null); + configurationService.setProperty("cris.ItemAuthority.JournalAuthority.relationshipType", null); + metadataAuthorityService.clearCache(); + choiceAuthorityService.clearCache(); + } } private ItemRest getItemViaRestByID(String authToken, UUID id) throws Exception { From 9135d015696135d883af6c185c2bc98ad1daca4c Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Sat, 25 Nov 2023 10:31:14 +0100 Subject: [PATCH 621/686] [DSC-1307] - Fix tests in DiscoveryRestControllerMultiLanguageIT --- .../content/authority/ReciprocalItemAuthorityConsumerIT.java | 2 +- .../dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java b/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java index 352be9f2eede..ad5ca83105bf 100644 --- a/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/content/authority/ReciprocalItemAuthorityConsumerIT.java @@ -65,7 +65,7 @@ public void setUp() throws Exception { .getConfigurationService(); metadataAuthorityService = ContentAuthorityServiceFactory.getInstance() .getMetadataAuthorityService(); - eventService = EventServiceFactory.getInstance().getEventService() + eventService = EventServiceFactory.getInstance().getEventService(); configurationService.setProperty("ItemAuthority.reciprocalMetadata.Publication.dc.relation.product", "dc.relation.publication"); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java index aab6da9a5cec..60f2c183bc45 100644 --- 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerMultiLanguageIT.java @@ -434,6 +434,7 @@ public void discoverFacetsTypesTestWithUnknownAuthority() throws Exception { @Test public void discoverFacetsTypesTestWithUnknownAuthorityName() throws Exception { + context.turnOffAuthorisationSystem(); try { configurationService.setProperty("authority.controlled.dc.type", "true"); metadataAuthorityService.clearCache(); From b00d56a698c6fcff193b09a69d5d223b9095acd5 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Mon, 18 Dec 2023 18:34:09 +0100 Subject: [PATCH 622/686] [DSC-1438] Remove green orange configurations --- .../dspaceFolder/config/item-submission.xml | 22 + .../dspaceFolder/config/modules/authority.cfg | 285 ++ .../dspaceFolder/config/submission-forms.xml | 3935 +++++++++-------- .../dspaceFolder/config/item-submission.xml | 561 +-- dspace/config/item-submission.xml | 20 - dspace/config/modules/authority.cfg | 4 +- dspace/config/submission-forms.xml | 279 -- 7 files changed, 2719 insertions(+), 2387 deletions(-) create mode 100644 dspace-api/src/test/data/dspaceFolder/config/modules/authority.cfg diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index 64fbc551def6..d94d1145ee45 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ -285,6 +285,17 @@ submission + + submit.progressbar.describe.green + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.green + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + @@ -457,6 +468,17 @@ + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/modules/authority.cfg b/dspace-api/src/test/data/dspaceFolder/config/modules/authority.cfg new file mode 100644 index 000000000000..10e4bc36fbb9 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/modules/authority.cfg @@ -0,0 +1,285 @@ +#---------------------------------------------------------------# +#----------------- AUTHORITY CONFIGURATIONS --------------------# +#---------------------------------------------------------------# +# These configs are used by the authority framework # +#---------------------------------------------------------------# + +## The DCInputAuthority plugin is automatically configured with every +## value-pairs element in input-forms.xml, namely: +## common_identifiers, common_types, common_iso_languages +## +## The DSpaceControlledVocabulary plugin is automatically configured +## with every *.xml file in [dspace]/config/controlled-vocabularies, +## and creates a plugin instance for each, using base filename as the name. +## eg: nsi, srsc. 
+## Each DSpaceControlledVocabulary plugin comes with three configuration options: +# vocabulary.plugin._plugin_.hierarchy.store = # default: true +# vocabulary.plugin._plugin_.hierarchy.suggest = # default: false +# vocabulary.plugin._plugin_.delimiter = "" # default: "::" +## +## An example using "srsc" can be found later in this section + +plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \ + org.dspace.content.authority.DCInputAuthority, \ + org.dspace.content.authority.DSpaceControlledVocabulary + + ## +## This sets the default lowest confidence level at which a metadata value is included +## in an authority-controlled browse (and search) index. It is a symbolic +## keyword, one of the following values (listed in descending order): +## accepted +## uncertain +## ambiguous +## notfound +## failed +## rejected +## novalue +## unset +## See manual or org.dspace.content.authority.Choices source for descriptions. +authority.minconfidence = ambiguous + +# Configuration settings for ORCID based authority control. +# Uncomment the lines below to enable configuration +#choices.plugin.dc.contributor.author = SolrAuthorAuthority +#choices.presentation.dc.contributor.author = authorLookup +#authority.controlled.dc.contributor.author = true +#authority.author.indexer.field.1=dc.contributor.author + +## +## This sets the lowest confidence level at which a metadata value is included +## in an authority-controlled browse (and search) index. It is a symbolic +## keyword from the same set as for the default "authority.minconfidence" +#authority.minconfidence.dc.contributor.author = accepted + +## demo: subject code autocomplete, using srsc as authority +## (DSpaceControlledVocabulary plugin must be enabled) +## Warning: when enabling this feature any controlled vocabulary configuration in the input-forms.xml for the metadata field will be overridden. 
+#vocabulary.plugin.srsc.hierarchy.store = true +#vocabulary.plugin.srsc.hierarchy.suggest = true +#vocabulary.plugin.srsc.delimiter = "::" + +# publisher name lookup through SHERPA/RoMEO: +#choices.plugin.dc.publisher = SRPublisher +#choices.presentation.dc.publisher = suggest + +## demo: journal title lookup, with ISSN as authority +#choices.plugin.dc.title.alternative = SRJournalTitle +#choices.presentation.dc.title.alternative = suggest +#authority.controlled.dc.title.alternative = true + +##### Authority Control Settings ##### +#plugin.named.org.dspace.content.authority.ChoiceAuthority = \ +# org.dspace.content.authority.SampleAuthority = Sample, \ +# org.dspace.content.authority.SHERPARoMEOPublisher = SRPublisher, \ +# org.dspace.content.authority.SHERPARoMEOJournalTitle = SRJournalTitle, \ +# org.dspace.content.authority.SolrAuthority = SolrAuthorAuthority + +#Uncomment to enable ORCID authority control +#plugin.named.org.dspace.content.authority.ChoiceAuthority = \ +# org.dspace.content.authority.SolrAuthority = SolrAuthorAuthority + +##### Authority Control Settings ##### +plugin.named.org.dspace.content.authority.ChoiceAuthority = \ + org.dspace.content.authority.OrcidAuthority = AuthorAuthority,\ + org.dspace.content.authority.OrcidAuthority = EditorAuthority,\ + org.dspace.content.authority.OrcidAuthority = AuthorStrictMatchAuthority, \ + org.dspace.content.authority.OrcidAuthority = AuthorCoarseMatchAuthority, \ + org.dspace.content.authority.ItemAuthority = DataSetAuthority,\ + org.dspace.content.authority.ItemAuthority = JournalAuthority,\ + org.dspace.content.authority.RorOrgUnitAuthority = OrgUnitAuthority,\ + org.dspace.content.authority.ItemAuthority = ProjectAuthority,\ + org.dspace.content.authority.OpenAIREProjectAuthority = FundingAuthority,\ + org.dspace.content.authority.ItemAuthority = PublicationAuthority,\ + org.dspace.content.authority.ItemAuthority = EquipmentAuthority,\ + org.dspace.content.authority.EPersonAuthority = EPersonAuthority,\ + org.dspace.content.authority.GroupAuthority = GroupAuthority,\ + org.dspace.content.authority.ZDBAuthority = ZDBAuthority,\ + org.dspace.content.authority.SherpaAuthority = SherpaAuthority + +cris.ItemAuthority.forceInternalName = true + +# AuthorStrictMatchAuthority configuration +cris.ItemAuthority.AuthorStrictMatchAuthority.forceInternalName = false + +# AuthorCoarseMatchAuthority configuration +cris.ItemAuthority.AuthorCoarseMatchAuthority.forceInternalName = false + +cris.ItemAuthority.DataSetAuthority.entityType = Product +cris.ItemAuthority.JournalAuthority.entityType = Journal +cris.ItemAuthority.OrgUnitAuthority.entityType = OrgUnit +cris.ItemAuthority.ProjectAuthority.entityType = Project +cris.ItemAuthority.FundingAuthority.entityType = Funding +cris.ItemAuthority.PublicationAuthority.entityType = Publication +cris.ItemAuthority.EquipmentAuthority.entityType = Equipment + +## OrcidAuthority Extras configuration +# + +cris.OrcidAuthority.EditorAuthority.institution.key = oairecerif_editor_affiliation +cris.OrcidAuthority.AuthorAuthority.institution.key = oairecerif_author_affiliation + +#cris.OrcidAuthority.AuthorAuthority.institution.display = true +#cris.OrcidAuthority.AuthorAuthority.institution.as-data = true +# +#cris.OrcidAuthority.AuthorAuthority.orcid-id.key = person_identifier_orcid +#cris.OrcidAuthority.AuthorAuthority.orcid-id.display = true +#cris.OrcidAuthority.AuthorAuthority.orcid-id.as-data = true + + +cris.SherpaAuthority.entityType = Journal +cris.SherpaAuthority.local-item-choices-enabled = true 
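Aside on the vocabulary.plugin._plugin_.hierarchy.store option documented in the header comments of this file: the two controlledVocabularyWithHierarchyStore tests earlier in this patch series exercise exactly that switch. The fragment below is only a condensed, illustrative sketch of what those tests assert; it assumes the fixtures they set up (vocabularyName, editItem, itemA, tokenAdmin, patchBody) and the injected configurationService, and it is not itself part of the patch.

    // Illustrative sketch, assuming the fixtures of the tests above are in scope.
    // Store only the selected leaf term instead of the full hierarchy path:
    configurationService.setProperty(
            "vocabulary.plugin." + vocabularyName + ".hierarchy.store", false);

    // Apply the same ReplaceOperation patch that selects the c_b239 ("editorial") leaf ...
    getClient(tokenAdmin).perform(patch("/api/core/edititems/" + editItem.getID() + ":MODE-VOC")
            .content(patchBody)
            .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
            .andExpect(status().isOk());

    // ... and only "editorial" is stored; with hierarchy.store = true (the default)
    // the full path "text::journal::editorial" would be stored instead.
    getClient(tokenAdmin).perform(get("/api/core/items/" + itemA.getID()))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.metadata", Matchers.allOf(
                    hasJsonPath("$['dc.type'][0].value", is("editorial")),
                    hasJsonPath("$['dc.type'][0].authority", is(vocabularyName + ":c_b239")))));
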
+ +ItemAuthority.reciprocalMetadata.Publication.dc.relation.product = dc.relation.publication +ItemAuthority.reciprocalMetadata.Product.dc.relation.publication = dc.relation.product + +choices.plugin.dc.contributor.author = AuthorAuthority +choices.presentation.dc.contributor.author = suggest +authority.controlled.dc.contributor.author = true + +choices.plugin.green.override.dc.contributor.author = AuthorStrictMatchAuthority +choices.plugin.orange.override.dc.contributor.author = AuthorCoarseMatchAuthority + +choices.plugin.oairecerif.author.affiliation = OrgUnitAuthority +choices.presentation.oairecerif.author.affiliation = suggest +authority.controlled.oairecerif.author.affiliation = true + +choices.plugin.dc.contributor.editor = EditorAuthority +choices.presentation.dc.contributor.editor = suggest +authority.controlled.dc.contributor.editor = true + +choices.plugin.oairecerif.editor.affiliation = OrgUnitAuthority +choices.presentation.oairecerif.editor.affiliation = suggest +authority.controlled.oairecerif.editor.affiliation = true + +choices.plugin.dc.relation.product = DataSetAuthority +choices.presentation.dc.relation.product = suggest +authority.controlled.dc.relation.product = true + +choices.plugin.dc.relation.publication = PublicationAuthority +choices.presentation.dc.relation.publication = suggest +authority.controlled.dc.relation.publication = true + +choices.plugin.dc.relation.journal = SherpaAuthority +choices.presentation.dc.relation.journal = suggest +authority.controlled.dc.relation.journal = true + +choices.plugin.dc.relation.project = ProjectAuthority +choices.presentation.dc.relation.project = suggest +authority.controlled.dc.relation.project = true + +choices.plugin.dc.relation.funding = FundingAuthority +choices.presentation.dc.relation.funding = suggest +# the externalsource provider identifier +choises.externalsource.dc.relation.funding = fundingAuthority +authority.controlled.dc.relation.funding = true + +choices.plugin.dc.description.sponsorship = OrgUnitAuthority +choices.presentation.dc.description.sponsorship = suggest +authority.controlled.dc.description.sponsorship = true + +### authorities required by Projects +choices.plugin.crispj.coordinator = OrgUnitAuthority +choices.presentation.crispj.coordinator = suggest +authority.controlled.crispj.coordinator = true + +choices.plugin.crispj.organization = OrgUnitAuthority +choices.presentation.crispj.organization = suggest +authority.controlled.crispj.organization = true + +choices.plugin.crispj.partnerou = OrgUnitAuthority +choices.presentation.crispj.partnerou = suggest +authority.controlled.crispj.partnerou = true + +choices.plugin.crispj.investigator = AuthorAuthority +choices.presentation.crispj.investigator = suggest +authority.controlled.crispj.investigator = true + +choices.plugin.crispj.coinvestigators = AuthorAuthority +choices.presentation.crispj.coinvestigators = suggest +authority.controlled.crispj.coinvestigators = true + +choices.plugin.dc.relation.equipment = EquipmentAuthority +choices.presentation.dc.relation.equipment = suggest +authority.controlled.dc.relation.equipment = true + +### authorities required by Person +choices.plugin.person.affiliation.name = OrgUnitAuthority +choices.presentation.person.affiliation.name = suggest +authority.controlled.person.affiliation.name = true + +choices.plugin.oairecerif.person.affiliation = OrgUnitAuthority +choices.presentation.oairecerif.person.affiliation = suggest +authority.controlled.oairecerif.person.affiliation = true + 
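The choices.plugin / choices.presentation / authority.controlled triplets above are the same switches that the test changes earlier in this patch series now flip at runtime inside try/finally blocks. As a design note, that repeated pattern could be factored into a small helper; the sketch below is a hypothetical illustration (the helper name withControlledDcType is invented here), reusing only the configurationService and metadataAuthorityService calls already shown in those tests.

    // Hypothetical helper, not part of this patch: runs test assertions with dc.type
    // temporarily configured as an authority-controlled field and restores the default
    // afterwards, mirroring the try/finally blocks added to the tests above.
    private void withControlledDcType(Runnable assertions) {
        configurationService.setProperty("authority.controlled.dc.type", "true");
        // the authority.controlled.* settings are cached, so the cache has to be
        // cleared for the override to take effect
        metadataAuthorityService.clearCache();
        try {
            assertions.run();
        } finally {
            configurationService.setProperty("authority.controlled.dc.type", "false");
            metadataAuthorityService.clearCache();
        }
    }
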
+#choices.plugin.crisrp.workgroup = OrgUnitAuthority +#choices.presentation.crisrp.workgroup = suggest +#authority.controlled.crisrp.workgroup = true + +#choices.plugin.crisrp.qualification = OrgUnitAuthority +#choices.presentation.crisrp.qualification = suggest +#authority.controlled.crisrp.qualification = true + +#choices.plugin.crisrp.education = OrgUnitAuthority +#choices.presentation.crisrp.education = suggest +#authority.controlled.crisrp.education = true + +### authorities required by OrgUnit +choices.plugin.organization.parentOrganization = OrgUnitAuthority +choices.presentation.organization.parentOrganization = suggest +authority.controlled.organization.parentOrganization = true + +choices.plugin.crisou.director= AuthorAuthority +choices.presentation.crisou.director = suggest +authority.controlled.crisou.director = true + +### authorities required by Funding +choices.plugin.oairecerif.funder = OrgUnitAuthority +choices.presentation.oairecerif.funder = suggest +authority.controlled.oairecerif.funder = true + +choices.plugin.oairecerif.fundingParent = FundingAuthority +choices.presentation.oairecerif.fundingParent = suggest +authority.controlled.oairecerif.fundingParent = true + +choices.plugin.crisfund.investigators = AuthorAuthority +choices.presentation.crisfund.investigators = suggest +authority.controlled.crisfund.investigators = true + +choices.plugin.crisfund.coinvestigators = AuthorAuthority +choices.presentation.crisfund.coinvestigators = suggest +authority.controlled.crisfund.coinvestigators = true + +choices.plugin.crisfund.leadorganizations = OrgUnitAuthority +choices.presentation.crisfund.leadorganizations = suggest +authority.controlled.crisfund.leadorganizations = true + +choices.plugin.crisfund.leadcoorganizations = OrgUnitAuthority +choices.presentation.crisfund.leadcoorganizations = suggest +authority.controlled.crisfund.leadcoorganizations = true + +### authorities required by cris features +choices.plugin.cris.policy.eperson = EPersonAuthority +choices.presentation.cris.policy.eperson = suggest +authority.controlled.cris.policy.eperson = true + +choices.plugin.cris.policy.group = GroupAuthority +choices.presentation.cris.policy.group = suggest +authority.controlled.cris.policy.group = true + +choices.plugin.dspace.object.owner = EPersonAuthority +choices.presentation.dspace.object.owner = suggest +authority.controlled.dspace.object.owner = true + +choices.plugin.dc.identifier.issn = ZDBAuthority +choices.presentation.dc.identifier.issn = suggest +authority.controlled.dc.identifier.issn = true + +choices.plugin.dc.relation.ispartof = SherpaAuthority +choices.presentation.dc.relation.ispartof = suggest +authority.controlled.dc.relation.ispartof = true + +authority.controlled.dc.type = true +choices.plugin.dc.type = ControlledVocabularyAuthority + +# DSpace-CRIS stores by default the authority of controlled vocabularies +vocabulary.plugin.authority.store = true diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index fe47289e4315..cc41ac0c6e18 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -4,688 +4,275 @@ - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + - +
    - - - dc - title - - false - - onebox - Enter the name of the file. - You must enter a main title for this item. - - - - - dc - description - true - - textarea - Enter a description for the file - - - - - -
    - - - - isAuthorOfPublication - person - true - true - - Add an author - - dc - contributor - author - name - - orcid,my_staff_db - - - - - - dc - title - - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - - - dc - title - alternative - true - - onebox - If the item has any alternative titles, please enter them here. - - - - - - dc - date - issued - false - - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - You must enter at least the year. - - - - dc - publisher - - false - - - onebox - Enter the name of the publisher of the previously issued instance of this item. - - - - - - dc - identifier - citation - false - - onebox - Enter the standard citation for the previously issued instance of this item. - - - - - - dc - relation - ispartofseries - true - - Technical Report - series - Enter the series and number assigned to this item by your community. - - - - - - dc - identifier - - - true - - qualdrop_value - If the item has any identification numbers or codes associated with -it, please enter the types and the actual numbers or codes. - - - - - - dc - type - - true - - dropdown - Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key. - - - - - - dc - language - iso - false - - dropdown - Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. - - - - - -
    dc - contributor - author - - name + title + false - You must enter at least the author. - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. + + onebox + Enter the name of the file. + You must enter a main title for this item. - person - affiliation - name - - onebox - false - - Enter the affiliation of the author as stated on the publication. + dc + description + true + + textarea + Enter a description for the file + -
    + + + + + isAuthorOfPublication + person + true + true + + Add an author + + dc + contributor + author + name + + orcid,my_staff_db + + + - orgunit - identifier - name - - onebox + dc + title + false - You must enter at least the Orgunit name. - Enter the name of the Orgunit of this item + + onebox + Enter the main title of the item. + You must enter a main title for this item. + - orgunit - identifier - id - + dc + title + alternative + true + onebox - false - - Enter the id of the Orgunit of this item + If the item has any alternative titles, please enter them here. + - - -
    - - - dc - contributor - author - true - - group - Enter the names of the authors of this item. - - - - - - dc - title - - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - - - dc - title - alternative - true - - onebox - If the item has any alternative titles, please enter them here. - - - - - - dc - date - issued - false - - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - You must enter at least the year. - - - - dc - publisher - - false - - - onebox - Enter the name of the publisher of the previously issued instance of this item. - - - - - - dc - identifier - citation - false - - onebox - Enter the standard citation for the previously issued instance of this item. - - - - - - dc - relation - ispartofseries - true - - series - Enter the series and number assigned to this item by your community. - - - - - - dc - identifier - - - true - - qualdrop_value - If the item has any identification numbers or codes associated with -it, please enter the types and the actual numbers or codes. - - - - - - dc - type - - true - - dropdown - Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key. - - - - - - dc - language - iso - false - - dropdown - Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. - - - - - - orgunit - identifier - name - true - - inline-group - Enter the names of the orgunit of this item. - - - - -
    - - - dc - subject - - - true - - twobox - Enter appropriate subject keywords or phrases. - - srsc - - - - - dc - description - abstract - false - - textarea - Enter the abstract of the item. - - - - - - dc - description - sponsorship - false - - textarea - Enter the names of any sponsors and/or funding codes in the box. - - - - - - dc - description - - false - - textarea - Enter any other description or comments in this box. - - - - -
    - - - dc - subject - - - true - - twobox - Enter appropriate subject keywords or phrases. - - srsc - - - - - dc - description - abstract - false - - textarea - Enter the abstract of the item. - - - - - - dc - description - sponsorship - false - - textarea - Enter the names of any sponsors and/or funding codes in the box. - - - - - - dc - description - - false - - textarea - Enter any other description or comments in this box. - - - - -
    - - - dc - subject - - - true - - twobox - Enter appropriate subject keywords or phrases. - - srsc - - - - - dc - description - abstract - false - - textarea - Enter the abstract of the item. - - - - - - dc - description - sponsorship - false - - textarea - Enter the names of any sponsors and/or funding codes in the box. - - - - - - dc - description - - false - - textarea - Enter any other description or comments in this box. - - - - -
    - - - isJournalOfVolume - periodical - creativework.publisher:somepublishername - - Select the journal related to this volume. - - - - - - dc - title - - onebox - Enter the name of the journal volume - - - - - publicationVolume - volumeNumber - - onebox - Enter the volume of the journal volume - - - - - creativework - datePublished - - date - Enter the issue date of the journal volume - - - - - dc - description - - textarea - Enter the description of the journal volume - - - - - -
    dc - contributor - author - - onebox + date + issued false - You must enter at least the author. - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + + You must enter at least the year. + + + + dc + publisher + + false + + + onebox + Enter the name of the publisher of the previously issued instance of this item. + - oairecerif - author - affiliation - + dc + identifier + citation + false + onebox + Enter the standard citation for the previously issued instance of this item. + + + + + + dc + relation + ispartofseries + true + + Technical Report + series + Enter the series and number assigned to this item by your community. + + + + + + dc + identifier + + + true + + qualdrop_value + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + + + + + + + dc + type + + true + + dropdown + Select the type(s) of content of the item. To select more than one value in the list, you may + have to hold down the "CTRL" or "Shift" key. + + + + + + + dc + language + iso false - - Enter the affiliation of the author as stated on the publication. + + dropdown + Select the language of the main content of the item. If the language does not appear in the + list, please select 'Other'. If the content does not really have a language (for example, if it + is a dataset or an image) please select 'N/A'. + + -
    + + dc contributor - editor - - onebox + author + + name false You must enter at least the author. - The editors of this publication. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh + or Smith, J]. + - oairecerif - editor - affiliation + person + affiliation + name onebox false - - Enter the affiliation of the editor as stated on the publication. + + Enter the affiliation of the author as stated on the publication. -
    + + - dc - relation - project - + orgunit + identifier + name + onebox false - You must enter at least the project name. - Enter the name of project, if any, that has supported this publication + You must enter at least the Orgunit name. + Enter the name of the Orgunit of this item - dc - relation - grantno - + orgunit + identifier + id + onebox false - - + + Enter the id of the Orgunit of this item -
    + + dc - identifier - - qualdrop_value + contributor + author true - - If the item has any identification numbers or codes associated with -it, please enter the types and the actual numbers or codes. + + group + Enter the names of the authors of this item. + dc title + + false onebox - false - You must enter a main title for this item. Enter the main title of the item. + You must enter a main title for this item. + @@ -693,11 +280,11 @@ it, please enter the types and the actual numbers or codes. dc title alternative + true onebox - true - If the item has any alternative titles, please enter them here. + @@ -705,74 +292,123 @@ it, please enter the types and the actual numbers or codes. dc date issued + false + date - false - You must enter at least the year. Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. + You can leave out the day and/or month if they aren't + applicable. + + You must enter at least the year. + + + + dc + publisher + + false + + + onebox + Enter the name of the publisher of the previously issued instance of this item. + dc - contributor - author - - group + identifier + citation + false + + onebox + Enter the standard citation for the previously issued instance of this item. + + + + + + dc + relation + ispartofseries true - - Enter the names of the authors of this item. + + series + Enter the series and number assigned to this item by your community. + dc - contributor - editor - - group + identifier + + true - - The editors of this publication. + + qualdrop_value + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + + dc type + + true - onebox - false - You must select a publication type - Select the type of content of the item. - types + dropdown + Select the type(s) of content of the item. To select more than one value in the list, you may + have to hold down the "CTRL" or "Shift" key. + + - -
    dc language iso + false dropdown - false - - Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. + Select the language of the main content of the item. If the language does not appear in the + list, please select 'Other'. If the content does not really have a language (for example, if it + is a dataset or an image) please select 'N/A'. + + + + + + + orgunit + identifier + name + true + + inline-group + Enter the names of the orgunit of this item. + + +
    dc subject - - tag + + true - + + twobox Enter appropriate subject keywords or phrases. + + srsc @@ -780,1329 +416,2016 @@ it, please enter the types and the actual numbers or codes. dc description abstract + false textarea - false - Enter the abstract of the item. + - -
    dc - relation - publication - - onebox + description + sponsorship false - - The publication where this publication is included. E.g. a book chapter lists here the book, a contribution to a conference lists here the conference proceeding. - book part + + textarea + Enter the names of any sponsors and/or funding codes in the box. + dc - relation - isbn - - onebox + description + false - - The ISBN of the book/report if it was not found in the system - book part + + textarea + Enter any other description or comments in this box. + + +
    dc - relation - doi - - onebox - false - - The DOI of the book/report if it was not found in the system - book part + subject + + + true + + twobox + Enter appropriate subject keywords or phrases. + + srsc dc - relation - ispartof - - onebox + description + abstract false - - The journal or Serie where this publication has been published + + textarea + Enter the abstract of the item. + dc - relation - ispartofseries - - series - true - - Enter the series and number assigned to this item by your community. + description + sponsorship + false + + textarea + Enter the names of any sponsors and/or funding codes in the box. + dc - relation - issn - - onebox + description + false - - The journal or Serie ISSN if it was not found in the system + + textarea + Enter any other description or comments in this box. + + +
    dc - coverage - publication - - onebox - false - - The publication object of the review - review,book review + subject + + + true + + twobox + Enter appropriate subject keywords or phrases. + + srsc dc - coverage - isbn - - onebox + description + abstract false - - The ISBN of the reviewed item if it was not found in the system - review,book review + + textarea + Enter the abstract of the item. + dc - coverage - doi - - onebox + description + sponsorship false - - The DOI of the reviewed item if it was not found in the system - review,book review + + textarea + Enter the names of any sponsors and/or funding codes in the box. + dc description - sponsorship - - onebox - true - - Enter the name of any sponsors. + + false + + textarea + Enter any other description or comments in this box. + + +
    + + + isJournalOfVolume + periodical + creativework.publisher:somepublishername + + Select the journal related to this volume. + + + dc - description - volume + title + + onebox + Enter the name of the journal volume + + + + + publicationVolume + volumeNumber onebox - false - - If applicable, the volume of the publishing channel where this publication appeared + Enter the volume of the journal volume + + + + + creativework + datePublished + + date + Enter the issue date of the journal volume dc description - issue - - onebox - false - - If applicable, the issue of the publishing channel where this publication appeared + + textarea + Enter the description of the journal volume + + + +
    dc - description - startpage - + contributor + author + onebox false - - If applicable, the page where this publication starts + You must enter at least the author. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh + or Smith, J]. + - dc - description - endpage - + oairecerif + author + affiliation + onebox false - - If applicable, the page where this publication ends + + Enter the affiliation of the author as stated on the publication. -
    + dc - relation - project - - group - true - - Enter the name of project, if any, that has produced this publication. + contributor + editor + + onebox + false + You must enter at least the author. + The editors of this publication. + + + + + oairecerif + editor + affiliation + + onebox + false + + Enter the affiliation of the editor as stated on the publication. + +
    dc relation - conference - + project + onebox - true - - Enter the name of the conference where the item has been presented, if any. + false + You must enter at least the project name. + Enter the name of project, if any, that has supported this publication dc relation - product - + grantno + onebox - true - - Link the item to one or more existent dataset in the repository used or described by the publication or, put here the dataset citation + false + + + +
    dc identifier - citation - - onebox - false - - Enter the standard citation for the previously issued instance of this item. + + qualdrop_value + true + + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + dc - description - - textarea + title + + onebox false - - Enter any other description or comments in this box. + You must enter a main title for this item. + Enter the main title of the item. dc - description - sponsorship - + title + alternative + onebox true - - Enter the name of any sponsors. + + If the item has any alternative titles, please enter them here. - - - -
    - - - dc - title - - onebox - false - You must specify a title for the patent - The title of the patent - - - - - dcterms - dateAccepted - - date - false - - The Approval date. - You can leave out the day and/or month if they aren't applicable. - - - - - dc - date - issued - - date - false - - The registration date of the patent. - You can leave out the day and/or month if they aren't applicable. - - - - - dc - contributor - author - - name - true - - The inventor: The actual devisor of an invention that is the subject of a patent. - - - - - dcterms - rightsHolder - - onebox - true - - The holders of this patent - - - - - dc - publisher - - onebox - true - - The issuer of the patent: the patent office - - - - - dc - identifier - patentno - - onebox - false - - The patent number. - - - - - dc - identifier - patentnumber - - onebox - false - - The patent number. - - - - - dc - type - - dropdown - false - You must select a patent type - Select the type of content of the patent. - - - - - dc - identifier - applicationnumber - - onebox - false - - The Application number. - - - - - dc - date - filled - - date - false - - The date Filled. - - - -
    - - - dc - language - iso - - dropdown - false - - Select the country and its language. - - - - - dc - subject - - onebox - true - - Enter appropriate subject keywords or phrases. - - - - - dc - description - abstract - - textarea - false - - Enter the description of the patent. - - - -
    - - - dc - relation - - onebox - true - - Enter the name of project, if any, that has produced this patent. - - - - - dc - relation - patent - - onebox - true - - Patents that precede (i.e., have priority over) this patent - - - - - dc - relation - references - - onebox - true - - Result outputs that are referenced by this patent - - - - -
    dc - title - - name + date + issued + + date false - You must enter least at the Surname. - + You must enter at least the year. + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + - crisrp - name - - name - false - - + dc + contributor + author + + group + true + + Enter the names of the authors of this item. - crisrp - name - translated - - name - false - - + dc + contributor + editor + + group + true + + The editors of this publication. - crisrp - name - variant - - name - true - - + dc + type + + onebox + false + You must select a publication type + Select the type of content of the item. + types + +
    - person - givenName - - onebox + dc + language + iso + + dropdown false - - + + Select the language of the main content of the item. If the language does not appear in the + list, please select 'Other'. If the content does not really have a language (for example, if it + is a dataset or an image) please select 'N/A'. + + + - person - familyName - - onebox - false - - + dc + subject + + tag + true + + Enter appropriate subject keywords or phrases. - person - birthDate - - date + dc + description + abstract + + textarea false - - + + Enter the abstract of the item. + + +
    + - oairecerif - person - gender - - dropdown + dc + relation + publication + + onebox false - - + + The publication where this publication is included. E.g. a book chapter lists here the book, a + contribution to a conference lists here the conference proceeding. + + book part - person - jobTitle - + dc + relation + isbn + onebox false - - + + The ISBN of the book/report if it was not found in the system + book part + + - person - affiliation - name - + dc + relation + doi + onebox false - - + + The DOI of the book/report if it was not found in the system + book part - crisrp - workgroup - + dc + relation + ispartof + onebox - true - - + false + + The journal or Serie where this publication has been published - oairecerif - identifier - url - - group + dc + relation + ispartofseries + + series true - - + + Enter the series and number assigned to this item by your community. - person - email - + dc + relation + issn + onebox false - - + + The journal or Serie ISSN if it was not found in the system dc - subject - - tag - true - - + coverage + publication + + onebox + false + + The publication object of the review + review,book review - person - identifier - orcid - + dc + coverage + isbn + onebox false - - + + The ISBN of the reviewed item if it was not found in the system + review,book review - person - identifier - scopus-author-id - + dc + coverage + doi + onebox - true - - + false + + The DOI of the reviewed item if it was not found in the system + review,book review - person - identifier - rid - + dc + description + sponsorship + onebox true - - + + Enter the name of any sponsors. - oairecerif - person - affiliation - - group - true - - + dc + description + volume + + onebox + false + + If applicable, the volume of the publishing channel where this publication appeared dc description - abstract - - textarea + issue + + onebox false - - + + If applicable, the issue of the publishing channel where this publication appeared - crisrp - education - - group - true - - + dc + description + startpage + + onebox + false + + If applicable, the page where this publication starts - crisrp - country - + dc + description + endpage + onebox false - - + + If applicable, the page where this publication ends + +
    - crisrp - qualification - + dc + relation + project + group true - - + + Enter the name of project, if any, that has produced this publication. - person - knowsLanguage - - dropdown + dc + relation + conference + + onebox true - - + + Enter the name of the conference where the item has been presented, if any. - cris - policy - eperson - + dc + relation + product + onebox - false - - + true + + Link the item to one or more existent dataset in the repository used or described by the + publication or, put here the dataset citation + - cris - policy - group - + dc + identifier + citation + onebox false - - + + Enter the standard citation for the previously issued instance of this item. - -
    - - - oairecerif - affiliation - role - false - - - onebox - - - - - oairecerif - person - affiliation - false - - - onebox - - You must enter at least the organisation of your affiliation. - - - oairecerif - affiliation - startDate - false - - - date - - - - - oairecerif - affiliation - endDate - false - - - date - - - - - -
    - crisrp - qualification - - onebox - false - You must enter at least the qualification title. - - + dc + description + + textarea + false + + Enter any other description or comments in this box. + + + + + dc + description + sponsorship + + onebox + true + + Enter the name of any sponsors. + + + + + +
    + + + dc + title + + onebox + false + You must specify a title for the patent + The title of the patent + + + + + dcterms + dateAccepted + + date + false + + The Approval date. + You can leave out the day and/or month if they aren't applicable. + + + + + + dc + date + issued + + date + false + + The registration date of the patent. + You can leave out the day and/or month if they aren't applicable. + + + + + + dc + contributor + author + + name + true + + The inventor: The actual devisor of an invention that is the subject of a patent. + + + + + dcterms + rightsHolder + + onebox + true + + The holders of this patent + + + + + dc + publisher + + onebox + true + + The issuer of the patent: the patent office + + + + + dc + identifier + patentno + + onebox + false + + The patent number. + + + + + dc + identifier + patentnumber + + onebox + false + + The patent number. + + + + + dc + type + + dropdown + false + You must select a patent type + Select the type of content of the patent. + + + + + dc + identifier + applicationnumber + + onebox + false + + The Application number. + + + + + dc + date + filled + + date + false + + The date Filled. + + + +
    + + + dc + language + iso + + dropdown + false + + Select the country and its language. + + + + + dc + subject + + onebox + true + + Enter appropriate subject keywords or phrases. + + + + + dc + description + abstract + + textarea + false + + Enter the description of the patent. + + + +
    + + + dc + relation + + onebox + true + + Enter the name of project, if any, that has produced this patent. + + + + + dc + relation + patent + + onebox + true + + Patents that precede (i.e., have priority over) this patent + + + + + dc + relation + references + + onebox + true + + Result outputs that are referenced by this patent + + + + +
    + + + dc + title + + name + false + You must enter least at the Surname. + + crisrp - qualification - start - + name + + name + false + + + + + + + crisrp + name + translated + + name + false + + + + + + + crisrp + name + variant + + name + true + + + + + + + person + givenName + + onebox + false + + + + + person + familyName + + onebox + false + + + + + + + person + birthDate + date false - - + + + + + oairecerif + person + gender + + dropdown + false + + + + + + + person + jobTitle + + onebox + false + + + + + person + affiliation + name + + onebox + false + + + + + + + crisrp + workgroup + + onebox + true + + + + + + + oairecerif + identifier + url + + group + true + + + + + + + person + email + + onebox + false + + + + + + + dc + subject + + tag + true + + + + + + + person + identifier + orcid + + onebox + false + + + + + + + person + identifier + scopus-author-id + + onebox + true + + + + + + + person + identifier + rid + + onebox + true + + + + + + + oairecerif + person + affiliation + + group + true + + + + + + + dc + description + abstract + + textarea + false + + + + + + + crisrp + education + + group + true + + + + + + + crisrp + country + + onebox + false + + + + + + + crisrp + qualification + + group + true + + + + + + + person + knowsLanguage + + dropdown + true + + + + + + + cris + policy + eperson + + onebox + false + + + + + + + cris + policy + group + + onebox + false + + + + + +
    + + + oairecerif + affiliation + role + false + + + onebox + + + + + oairecerif + person + affiliation + false + + + onebox + + You must enter at least the organisation of your affiliation. + + + oairecerif + affiliation + startDate + false + + + date + + + + + oairecerif + affiliation + endDate + false + + + date + + + + + +
    + + + crisrp + qualification + + onebox + false + You must enter at least the qualification title. + + + + + + crisrp + qualification + start + + date + false + + + + + + + crisrp + qualification + end + + date + false + + + + + +
    + + + crisrp + education + + onebox + false + You must enter at least the education title. + + + + + + crisrp + education + start + + date + false + + + + + crisrp + education + end + + date + false + + + + + + + crisrp + education + role + + onebox + false + + + + + +
    + + + oairecerif + identifier + url + + onebox + false + You must enter at least the site url. + + + + + + crisrp + site + title + + onebox + false + + + + + + +
    + + + dc + identifier + + true + + qualdrop_value + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + + please give an identifier + + + + +
    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + + You must enter at least the year. + + + + + dc + type + + true + + dropdown + Select the type(s) of content of the item. To select more than one value in the list, you may + have to hold down the "CTRL" or "Shift" key. + + + + + + + + dc + identifier + isbn + true + + Book + onebox + Enter the ISBN of the book. + An ISBN is required. + + + + dc + identifier + isbn + true + + Book chapter + onebox + Enter the ISBN of the book in which this chapter appears. + + + + + +
    + + + dc + contributor + author + + name + false + You must enter at least the author. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh + or Smith, J]. + + + + + + person + affiliation + name + + onebox + false + + Enter the affiliation of the author as stated on the publication. + + + + + dc + language + iso + false + + dropdown + Select the language of the item. + + + + + + + dc + type + + onebox + false + You must select a publication type + Select the type(s) of content of the item. + publication-coar-types + + + + +
    + + + dc + contributor + author + true + + onebox + Author field that can be associated with an authority providing suggestion + + + + + + dc + contributor + editor + false + + name + Editor field that can be associated with an authority providing the special name lookup + + + + + + dc + subject + true + + onebox + Subject field that can be associated with an authority providing lookup + + + + + +
    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + dc + subject + true + + onebox + Subject field that can be associated with an authority providing lookup + + + + dc + description + abstract + false + + textarea + Enter the abstract of the item. + + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't applicable. + + You must enter at least the year. + + + +
    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + +
    + + + dc + title + + false + + onebox + + + dc + date + issued + false + + date + submission + submission + + + dc + type + true + + onebox + submission + + + dc + language + iso + true + + onebox + all + + + dc + contributor + author + true + + onebox + workflow + + + dc + contributor + editor + true + + onebox + submission + all + + + dc + subject + true + + onebox + workflow + workflow + + + dc + description + false + + onebox + workflow + submission + + + + +
    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't applicable. + + You must enter at least the year. + + + + + dc + type + + onebox + false + You must select a publication type + Select the type(s) of content of the item. + publication-coar-types + + + + +
    + + + dc + title + + false + + onebox + Field required + + + + +
    + + + dc + type + + false + + onebox + Field required + + + + +
    + + + dc + identifier + true + + qualdrop_value + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + + + + + + + dc + title + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + dc + title + alternative + true + + onebox + If the item has any alternative titles, please enter them here. + + + + + + dc + date + issued + false + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + + You must enter at least the year. + + + + + dc + contributor + author + true + + group + Enter the names of the authors of this item. + + + + + + dc + contributor + group + true + + onebox + The editors of this publication. + + + + + + dc + type + false + + onebox + Select the type(s) of content of the item. + You must select a publication type + publication-coar-types + + + +
    + + + dc + contributor + author + false + + onebox + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh + or Smith, J]. + + You must enter at least the author. + + + + + oairecerif + author + affiliation + false + + onebox + Enter the affiliation of the author as stated on the publication. + + + + +
    + + + dc + contributor + editor + false + + onebox + The editors of this publication. + You must enter at least the author. + + + + + oairecerif + editor + affiliation + false + + onebox + Enter the affiliation of the editor as stated on the publication. + + + + + +
    + + + dc + identifier + true + + qualdrop_value + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + + - crisrp - qualification - end - - date + dc + title false - - + + onebox + Enter the main title of the item. + You must enter a main title for this item. - -
    - crisrp - education - + dc + title + alternative + true + onebox - false - You must enter at least the education title. - + If the item has any alternative titles, please enter them here. + - crisrp - education - start - - date + dc + date + issued false - - - - - crisrp - education - end - + date - false - - + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + + You must enter at least the year. - crisrp - education - role - - onebox - false - - + dc + contributor + author + true + + group + Enter the names of the authors of this item. + - -
    - oairecerif - identifier - url - + dc + contributor + group + true + onebox - false - You must enter at least the site url. - + The editors of this publication. + - crisrp - site - title - - onebox + dc + type false - - + + onebox + Select the type(s) of content of the item. + You must select a publication type + publication-coar-types - -
    - - - dc - identifier - - true - - qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - please give an identifier - - - - -
    - - - dc - title - - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - - - dc - date - issued - false - - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - You must enter at least the year. - - - - - dc - type - - true - - dropdown - Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key. - - - - - - - dc - identifier - isbn - true - - Book - onebox - Enter the ISBN of the book. - An ISBN is required. - - - - dc - identifier - isbn - true - - Book chapter - onebox - Enter the ISBN of the book in which this chapter appears. - - - - - -
    + dc contributor author - - name false + + onebox + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, + J]. + You must enter at least the author. - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. - person - affiliation - name + oairecerif + author + affiliation + false onebox - false - Enter the affiliation of the author as stated on the publication. + - - - dc - language - iso - false - - dropdown - Select the language of the item. - - - - + +
    dc - type - - onebox + contributor + editor false - You must select a publication type - Select the type(s) of content of the item. - publication-coar-types + + onebox + The editors of this publication. + You must enter at least the author. - - -
    - - - dc - contributor - author - true - - onebox - Author field that can be associated with an authority providing suggestion - - - - + - dc - contributor - editor - false - - name - Editor field that can be associated with an authority providing the special name lookup - + oairecerif + editor + affiliation + false + + onebox + Enter the affiliation of the editor as stated on the publication. + - - - - dc - subject - true - - onebox - Subject field that can be associated with an authority providing lookup - - - - - -
    - - - dc - title - - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - dc - subject - true - - onebox - Subject field that can be associated with an authority providing lookup - - - - dc - description - abstract - false - - textarea - Enter the abstract of the item. - - - - - - dc - date - issued - false - - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't applicable. - - You must enter at least the year. - - - -
    - - - dc - title - - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - - -
    - - - dc - title - - false - - onebox - - - dc - date - issued - false - - date - submission - submission - - - dc - type - true - - onebox - submission - - - dc - language - iso - true - - onebox - all - - - dc - contributor - author - true - - onebox - workflow - - - dc - contributor - editor - true - - onebox - submission - all - - - dc - subject - true - - onebox - workflow - workflow - - - dc - description - false - - onebox - workflow - submission - - - - -
    - - - dc - title - - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - - dc - date - issued - false - - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't applicable. - You must enter at least the year. - - - - - dc - type - - onebox - false - You must select a publication type - Select the type(s) of content of the item. - publication-coar-types - - - - -
    - - - dc - title - - false - - onebox - Field required - - - - -
    - - - dc - type - - false - - onebox - Field required - - +
    - - - - - - - - - - - - + + + + + + + + + + + + - - - - DOI - doi - - - Scopus ID - scopus - - - WOS ID - isi - + + + + DOI + doi + + + Scopus ID + scopus + + + WOS ID + isi + Ads Code adsbibcode @@ -2115,172 +2438,172 @@ it, please enter the types and the actual numbers or codes. arXiv ID arxiv - - ISSN - issn - - - Other - other - - - ISMN - ismn - - - Gov't Doc # - govdoc - - - URI - uri - - - ISBN - isbn - - + + ISSN + issn + + + Other + other + + + ISMN + ismn + + + Gov't Doc # + govdoc + + + URI + uri + + + ISBN + isbn + + - - - Animation - Animation - - - Article - Article - - - Book - Book - - - Book chapter - Book chapter - - - Dataset - Dataset - - - Learning Object - Learning Object - - - Image - Image - - - Image, 3-D - Image, 3-D - - - Map - Map - - - Musical Score - Musical Score - - - Plan or blueprint - Plan or blueprint - - - Preprint - Preprint - - - Presentation - Presentation - - - Recording, acoustical - Recording, acoustical - - - Recording, musical - Recording, musical - - - Recording, oral - Recording, oral - - - Software - Software - - - Technical Report - Technical Report - - - Thesis - Thesis - - - Video - Video - - - Working Paper - Working Paper - - - Other - Other - - + + + Animation + Animation + + + Article + Article + + + Book + Book + + + Book chapter + Book chapter + + + Dataset + Dataset + + + Learning Object + Learning Object + + + Image + Image + + + Image, 3-D + Image, 3-D + + + Map + Map + + + Musical Score + Musical Score + + + Plan or blueprint + Plan or blueprint + + + Preprint + Preprint + + + Presentation + Presentation + + + Recording, acoustical + Recording, acoustical + + + Recording, musical + Recording, musical + + + Recording, oral + Recording, oral + + + Software + Software + + + Technical Report + Technical Report + + + Thesis + Thesis + + + Video + Video + + + Working Paper + Working Paper + + + Other + Other + + - - - - N/A - - - - English (United States) - en_US - - - English - en - - - Spanish - es - - - German - de - - - French - fr - - - Italian - it - - - Japanese - ja - - - Chinese - zh - - - Turkish - tr - - - (Other) - other - - + + + + N/A + + + + English (United States) + en_US + + + English + en + + + Spanish + es + + + German + de + + + French + fr + + + Italian + it + + + Japanese + ja + + + Chinese + zh + + + Turkish + tr + + + (Other) + other + + Italia @@ -2302,9 +2625,9 @@ it, please enter the types and the actual numbers or codes. Botswana BW - - - + + + Use by owner Use by owner @@ -2500,7 +2823,7 @@ it, please enter the types and the actual numbers or codes. Unknown - + Male @@ -2516,6 +2839,6 @@ it, please enter the types and the actual numbers or codes. - +
    diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml index eca9acf79fd7..8956710a3e3a 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml @@ -7,285 +7,286 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - org.dspace.app.rest.submit.step.CollectionStep - collection - submission - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.steptwo - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - - submit.progressbar.upload - org.dspace.app.rest.submit.step.UploadStep - upload - - - submit.progressbar.license - org.dspace.app.rest.submit.step.LicenseStep - license - submission - - - - - - - - submit.progressbar.CClicense - org.dspace.app.rest.submit.step.CCLicenseStep - cclicense - - - - - - - - - - - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - - - submit.progressbar.identifiers - org.dspace.app.rest.submit.step.ShowIdentifiersStep - identifiers - - - - - Sample - org.dspace.submit.step.SampleStep - sample - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + org.dspace.app.rest.submit.step.CollectionStep + collection + submission + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.steptwo + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + 
submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + + submit.progressbar.upload + org.dspace.app.rest.submit.step.UploadStep + upload + + + submit.progressbar.license + org.dspace.app.rest.submit.step.LicenseStep + license + submission + + + + + + + + + submit.progressbar.CClicense + org.dspace.app.rest.submit.step.CCLicenseStep + cclicense + + + + + + + + + + + + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + + + submit.progressbar.identifiers + org.dspace.app.rest.submit.step.ShowIdentifiersStep + identifiers + + + + + Sample + org.dspace.submit.step.SampleStep + sample + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index d2d159775ad2..6af3ca6d5a83 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -155,16 +155,6 @@ org.dspace.app.rest.submit.step.DescribeStep submission-form - - submit.progressbar.describe.green - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.green - org.dspace.app.rest.submit.step.DescribeStep - submission-form - submit.progressbar.describe.owner org.dspace.app.rest.submit.step.DescribeStep @@ -360,16 +350,6 @@
    - - - - - - - - - - diff --git a/dspace/config/modules/authority.cfg b/dspace/config/modules/authority.cfg index 10e4bc36fbb9..99ccdbc025bb 100644 --- a/dspace/config/modules/authority.cfg +++ b/dspace/config/modules/authority.cfg @@ -136,8 +136,8 @@ choices.plugin.dc.contributor.author = AuthorAuthority choices.presentation.dc.contributor.author = suggest authority.controlled.dc.contributor.author = true -choices.plugin.green.override.dc.contributor.author = AuthorStrictMatchAuthority -choices.plugin.orange.override.dc.contributor.author = AuthorCoarseMatchAuthority +#choices.plugin.green.override.dc.contributor.author = AuthorStrictMatchAuthority +#choices.plugin.orange.override.dc.contributor.author = AuthorCoarseMatchAuthority choices.plugin.oairecerif.author.affiliation = OrgUnitAuthority choices.presentation.oairecerif.author.affiliation = suggest diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index 784b8ee301aa..ddf410abec3b 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -581,285 +581,6 @@ -
    - - - dc - identifier - true - - qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - - - - - - dc - title - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - - dc - title - alternative - true - - onebox - If the item has any alternative titles, please enter them here. - - - - - - dc - date - issued - false - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - You must enter at least the year. - - - - - dc - contributor - author - true - - group - Enter the names of the authors of this item. - - - - - - dc - contributor - group - true - - onebox - The editors of this publication. - - - - - - dc - type - false - - onebox - Select the type(s) of content of the item. - You must select a publication type - publication-coar-types - - - -
    - - - dc - contributor - author - false - - onebox - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. - You must enter at least the author. - - - - - oairecerif - author - affiliation - false - - onebox - Enter the affiliation of the author as stated on the publication. - - - - -
    - - - dc - contributor - editor - false - - onebox - The editors of this publication. - You must enter at least the author. - - - - - oairecerif - editor - affiliation - false - - onebox - Enter the affiliation of the editor as stated on the publication. - - - - -
    - - - dc - identifier - true - - qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - - - - - - dc - title - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - - - - - dc - title - alternative - true - - onebox - If the item has any alternative titles, please enter them here. - - - - - - dc - date - issued - false - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - You must enter at least the year. - - - - - dc - contributor - author - true - - group - Enter the names of the authors of this item. - - - - - - dc - contributor - group - true - - onebox - The editors of this publication. - - - - - - dc - type - false - - onebox - Select the type(s) of content of the item. - You must select a publication type - publication-coar-types - - - -
    - - - dc - contributor - author - false - - onebox - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. - You must enter at least the author. - - - - - oairecerif - author - affiliation - false - - onebox - Enter the affiliation of the author as stated on the publication. - - - - -
    - - - dc - contributor - editor - false - - onebox - The editors of this publication. - You must enter at least the author. - - - - - oairecerif - editor - affiliation - false - - onebox - Enter the affiliation of the editor as stated on the publication. - - - - -
    From c8566bdac7aa2297a6b194dcb3a45e0192b9bd62 Mon Sep 17 00:00:00 2001 From: mohamed eskander Date: Wed, 29 Nov 2023 17:07:42 +0200 Subject: [PATCH 623/686] [DSC-737] Restrict export formats by groups and Bulk Item export with metadata and bitstream --- .../dspace/content/BitstreamServiceImpl.java | 7 + .../crosswalk/METSDisseminationCrosswalk.java | 23 +- .../crosswalks/ItemExportCrosswalk.java | 5 + .../METSStreamDisseminationCrosswalk.java | 63 ++++ .../crosswalks/ReferCrosswalk.java | 48 +++ .../crosswalks/ZipItemExportCrosswalk.java | 325 ++++++++++++++++++ .../service/ItemExportFormatServiceImpl.java | 2 + .../content/service/BitstreamService.java | 2 + .../crosswalks/ZipItemExportCrosswalkIT.java | 256 ++++++++++++++ dspace/config/spring/api/crosswalks.xml | 18 +- 10 files changed, 744 insertions(+), 5 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java create mode 100644 dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java create mode 100644 dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index eef8a859157d..0682082e03f8 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -407,6 +407,13 @@ public Bitstream getBitstreamByName(Item item, String bundleName, String bitstre return null; } + @Override + public List getBitstreamByBundleName(Item item, String bundleName) throws SQLException { + return itemService.getBundles(item, bundleName).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .collect(Collectors.toList()); + } + @Override public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLException { List bundles = itemService.getBundles(item, bundleName); diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java index b8a4a8aef390..e6156e78d295 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java @@ -72,6 +72,16 @@ public class METSDisseminationCrosswalk private static final String schemaLocation = METS_NS.getURI() + " " + METS_XSD; + private String metsPackagerPlugin; + + public METSDisseminationCrosswalk() { + this.metsPackagerPlugin = METS_PACKAGER_PLUGIN; + } + + public METSDisseminationCrosswalk(String metsPackagerPlugin) { + this.metsPackagerPlugin = metsPackagerPlugin; + } + @Override public Namespace[] getNamespaces() { return (Namespace[]) ArrayUtils.clone(namespaces); @@ -103,10 +113,10 @@ public Element disseminateElement(Context context, DSpaceObject dso) PackageDisseminator dip = (PackageDisseminator) CoreServiceFactory.getInstance().getPluginService() - .getNamedPlugin(PackageDisseminator.class, METS_PACKAGER_PLUGIN); + .getNamedPlugin(PackageDisseminator.class, metsPackagerPlugin); if (dip == null) { throw new CrosswalkInternalException( - "Cannot find a disseminate plugin for package=" + METS_PACKAGER_PLUGIN); + "Cannot find a disseminate plugin for package=" + metsPackagerPlugin); } try { @@ -117,11 +127,16 @@ public Element 
disseminateElement(Context context, DSpaceObject dso) // Create a temporary file to disseminate into ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - String tempDirectory = (configurationService.hasProperty("upload.temp.dir")) + String tempDirectoryPath = (configurationService.hasProperty("upload.temp.dir")) ? configurationService.getProperty("upload.temp.dir") : System.getProperty("java.io.tmpdir"); - File tempFile = File.createTempFile("METSDissemination" + dso.hashCode(), null, new File(tempDirectory)); + File tempDirectory = new File(tempDirectoryPath); + if (!tempDirectory.exists()) { + tempDirectory.mkdirs(); + } + + File tempFile = File.createTempFile("METSDissemination" + dso.hashCode(), null, tempDirectory); tempFile.deleteOnExit(); // Disseminate METS to temp file diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java index 3a8b5a1524d1..dba686198e8a 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ItemExportCrosswalk.java @@ -11,6 +11,7 @@ import org.dspace.content.crosswalk.CrosswalkMode; import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.core.Context; /** * Implementation of {@link StreamDisseminationCrosswalk} related to item @@ -40,4 +41,8 @@ public default Optional getEntityType() { public default CrosswalkMode getCrosswalkMode() { return CrosswalkMode.SINGLE; } + + public default boolean isAuthorized(Context context) { + return true; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java new file mode 100644 index 000000000000..292a1e14f946 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/METSStreamDisseminationCrosswalk.java @@ -0,0 +1,63 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks; + +import java.io.IOException; +import java.io.OutputStream; +import java.sql.SQLException; +import javax.annotation.PostConstruct; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.content.crosswalk.CrosswalkException; +import org.dspace.content.crosswalk.METSDisseminationCrosswalk; +import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.core.Context; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; + +/** + * Implementation of {@link StreamDisseminationCrosswalk} that produces a METS + * manifest for the DSpace item as a metadata description, using + * {@link METSDisseminationCrosswalk}. 
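+ * The METS manifest is produced via the "AIP" packager plugin and written to the output stream as pretty-printed XML.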
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class METSStreamDisseminationCrosswalk implements StreamDisseminationCrosswalk { + + private METSDisseminationCrosswalk metsDisseminationCrosswalk; + + @PostConstruct + public void setup() { + metsDisseminationCrosswalk = new METSDisseminationCrosswalk("AIP"); + } + + @Override + public boolean canDisseminate(Context context, DSpaceObject dso) { + return metsDisseminationCrosswalk.canDisseminate(dso); + } + + @Override + public void disseminate(Context context, DSpaceObject dso, OutputStream out) + throws CrosswalkException, IOException, SQLException, AuthorizeException { + + Element element = metsDisseminationCrosswalk.disseminateElement(context, dso); + + XMLOutputter xmlOutputter = new XMLOutputter(Format.getPrettyFormat()); + xmlOutputter.output(element, out); + + } + + @Override + public String getMIMEType() { + return "application/xml"; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java index d54fef41ee68..519d9531cb71 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ReferCrosswalk.java @@ -58,6 +58,9 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfigurationUtilsService; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; import org.dspace.services.ConfigurationService; import org.dspace.util.UUIDUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -94,6 +97,9 @@ public class ReferCrosswalk implements ItemExportCrosswalk { @Autowired private MetadataSecurityService metadataSecurityService; + @Autowired + private GroupService groupService; + private Converter converter; private Consumer> linesPostProcessor; @@ -116,6 +122,8 @@ public class ReferCrosswalk implements ItemExportCrosswalk { private CrosswalkMode crosswalkMode; + private List allowedGroups; + @PostConstruct private void postConstruct() throws IOException { String parent = configurationService.getProperty("dspace.dir") + File.separator + "config" + File.separator; @@ -128,6 +136,21 @@ private void postConstruct() throws IOException { } } + @Override + public boolean isAuthorized(Context context) { + if (CollectionUtils.isEmpty(allowedGroups)) { + return true; + } + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return allowedGroups.contains(Group.ANONYMOUS); + } + + return allowedGroups.stream() + .anyMatch(groupName -> isMemberOfGroupNamed(context, ePerson, groupName)); + } + @Override public void disseminate(Context context, DSpaceObject dso, OutputStream out) throws CrosswalkException, IOException, SQLException, AuthorizeException { @@ -136,6 +159,10 @@ public void disseminate(Context context, DSpaceObject dso, OutputStream out) throw new CrosswalkObjectNotSupported("Can only crosswalk an Item with the configured type: " + entityType); } + if (!isAuthorized(context)) { + throw new AuthorizeException("The current user is not allowed to perform a zip item export"); + } + List lines = getItemLines(context, dso, true); if (linesPostProcessor != null) { @@ -154,6 +181,10 @@ public void disseminate(Context context, Iterator dsoIte throw new UnsupportedOperationException("No template defined 
for multiple items"); } + if (!isAuthorized(context)) { + throw new AuthorizeException("The current user is not allowed to perform a zip item export"); + } + List lines = new ArrayList(); for (TemplateLine line : multipleItemsTemplateLines) { @@ -466,6 +497,15 @@ private boolean hasExpectedEntityType(Item item) { return Objects.equals(itemEntityType, entityType); } + private boolean isMemberOfGroupNamed(Context context, EPerson ePerson, String groupName) { + try { + Group group = groupService.findByName(context, groupName); + return groupService.isMember(context, ePerson, group); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + public void setConverter(Converter converter) { this.converter = converter; } @@ -525,4 +565,12 @@ public void setPubliclyReadable(boolean isPubliclyReadable) { this.publiclyReadable = isPubliclyReadable; } + public List getAllowedGroups() { + return allowedGroups; + } + + public void setAllowedGroups(List allowedGroups) { + this.allowedGroups = allowedGroups; + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java new file mode 100644 index 000000000000..2096fa037273 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalk.java @@ -0,0 +1,325 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.crosswalk.CrosswalkException; +import org.dspace.content.crosswalk.CrosswalkMode; +import org.dspace.content.crosswalk.CrosswalkObjectNotSupported; +import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.core.exception.SQLRuntimeException; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.dspace.storage.bitstore.service.BitstreamStorageService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.Assert; + +/** + * Implementation of {@link ItemExportCrosswalk} that export all the given items + * creating a zip. 
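+ * Each exported item is written to its own zip folder, named after the item UUID, holding a metadata file produced by the configured crosswalk plus the bitstreams of the configured bundle.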
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ZipItemExportCrosswalk implements ItemExportCrosswalk { + + private static final Logger LOGGER = LoggerFactory.getLogger(ZipItemExportCrosswalk.class); + + @Autowired + private ItemService itemService; + + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private BitstreamStorageService bitstreamStorageService; + + @Autowired + private GroupService groupService; + + private String zipName = "items.zip"; + + private String entityType; + + private String bitstreamBundle = "ORIGINAL"; + + private String metadataFileName; + + private StreamDisseminationCrosswalk crosswalk; + + private CrosswalkMode crosswalkMode = CrosswalkMode.MULTIPLE; + + private List allowedGroups; + + @Override + public boolean isAuthorized(Context context) { + if (CollectionUtils.isEmpty(allowedGroups)) { + return true; + } + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return allowedGroups.contains(Group.ANONYMOUS); + } + + return allowedGroups.stream() + .anyMatch(groupName -> isMemberOfGroupNamed(context, ePerson, groupName)); + } + + @Override + public boolean canDisseminate(Context context, DSpaceObject dso) { + return dso.getType() == Constants.ITEM && hasExpectedEntityType((Item) dso); + } + + @Override + public void disseminate(Context context, DSpaceObject dso, OutputStream out) + throws CrosswalkException, IOException, SQLException, AuthorizeException { + this.disseminate(context, Arrays.asList(dso).iterator(), out); + } + + @Override + public void disseminate(Context context, Iterator dsoIterator, OutputStream out) + throws CrosswalkException, IOException, SQLException, AuthorizeException { + + Assert.notNull(metadataFileName, "The name of the metadata file is required to perform a bulk item export"); + Assert.notNull(crosswalk, "An instance of DisseminationCrosswalk is required to perform a bulk item export"); + Assert.notNull(zipName, "The name of the zip to be generated is required to perform a bulk item export"); + + if (!isAuthorized(context)) { + throw new AuthorizeException("The current user is not allowed to perform a zip item export"); + } + + createZip(context, dsoIterator, out); + + } + + private void createZip(Context context, Iterator dsoIterator, OutputStream out) + throws CrosswalkObjectNotSupported, IOException { + + try (ZipOutputStream zos = new ZipOutputStream(out)) { + + while (dsoIterator.hasNext()) { + + DSpaceObject dso = dsoIterator.next(); + if (!canDisseminate(context, dso)) { + throw new CrosswalkObjectNotSupported( + "Can only crosswalk an Item with the configured type: " + entityType); + } + + try { + createFolder(context, (Item) dso, zos); + } catch (Exception ex) { + LOGGER.error("An error occurs creating folder for item " + dso.getID(), ex); + } + + } + + } + + } + + private void createFolder(Context context, Item item, ZipOutputStream zos) throws IOException { + + createMetadataEntry(context, item, zos); + + List bitstreams = getBitstreamToExport(item); + for (Bitstream bitstream : bitstreams) { + try { + addBitstreamEntry(context, item, bitstream, zos); + } catch (Exception ex) { + LOGGER.error("An error occurs adding bitstream " + bitstream.getID() + + " to the folder of item " + item.getID(), ex); + } + } + + } + + private void createMetadataEntry(Context context, Item item, ZipOutputStream zos) throws IOException { + ZipEntry metadataEntry = new ZipEntry(getFolderName(item) + "/" + getMetadataFileName()); + 
zos.putNextEntry(metadataEntry); + zos.write(getMetadataFileNameContent(context, item)); + zos.closeEntry(); + } + + private byte[] getMetadataFileNameContent(Context context, Item item) { + try { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + crosswalk.disseminate(context, item, out); + return out.toByteArray(); + } catch (CrosswalkException | IOException | SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + } + + private List getBitstreamToExport(Item item) { + try { + return bitstreamService.getBitstreamByBundleName(item, bitstreamBundle); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + } + + private void addBitstreamEntry(Context context, Item item, Bitstream bitstream, ZipOutputStream zos) + throws IOException { + + InputStream bitstreamContent = retrieveContent(context, bitstream); + + ZipEntry bitstreamEntry = new ZipEntry(getFolderName(item) + "/" + getBitstreamFileName(context, bitstream)); + zos.putNextEntry(bitstreamEntry); + + try { + writeBitstreamContent(bitstreamContent, zos); + } finally { + zos.closeEntry(); + } + + } + + private void writeBitstreamContent(InputStream content, ZipOutputStream zos) throws IOException { + byte[] bytes = new byte[1024]; + int length; + while ((length = content.read(bytes)) >= 0) { + zos.write(bytes, 0, length); + } + } + + private String getBitstreamFileName(Context context, Bitstream bitstream) { + String name = "bitstream_" + bitstream.getID().toString(); + return getBitstreamExtension(context, bitstream) + .map(extension -> name + "." + extension) + .orElse(name); + } + + private Optional getBitstreamExtension(Context context, Bitstream bitstream) { + try { + return bitstream.getFormat(context).getExtensions().stream().findFirst(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private InputStream retrieveContent(Context context, Bitstream bitstream) { + try { + return bitstreamStorageService.retrieve(context, bitstream); + } catch (SQLException | IOException e) { + throw new RuntimeException(e); + } + } + + private String getMetadataFileName() { + return metadataFileName; + } + + private String getFolderName(Item item) { + return item.getID().toString(); + } + + private boolean isMemberOfGroupNamed(Context context, EPerson ePerson, String groupName) { + try { + Group group = groupService.findByName(context, groupName); + return groupService.isMember(context, ePerson, group); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public String getMIMEType() { + return "application/octet-stream"; + } + + public void setCrosswalkMode(CrosswalkMode crosswalkMode) { + this.crosswalkMode = crosswalkMode; + } + + @Override + public CrosswalkMode getCrosswalkMode() { + return Optional.ofNullable(this.crosswalkMode).orElse(CrosswalkMode.MULTIPLE); + } + + private boolean hasExpectedEntityType(Item item) { + if (StringUtils.isBlank(entityType)) { + return true; + } + return entityType.equals(itemService.getEntityType(item)); + } + + @Override + public String getFileName() { + return getZipName(); + } + + public String getZipName() { + return zipName; + } + + public void setZipName(String zipName) { + this.zipName = zipName; + } + + public Optional getEntityType() { + return Optional.ofNullable(entityType); + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public StreamDisseminationCrosswalk getCrosswalk() { + return crosswalk; + } + + public void 
setCrosswalk(StreamDisseminationCrosswalk crosswalk) { + this.crosswalk = crosswalk; + } + + public String getBitstreamBundle() { + return bitstreamBundle; + } + + public void setBitstreamBundle(String bitstreamBundle) { + this.bitstreamBundle = bitstreamBundle; + } + + public void setMetadataFileName(String metadataFileName) { + this.metadataFileName = metadataFileName; + } + + public List getAllowedGroups() { + return allowedGroups; + } + + public void setAllowedGroups(List allowedGroups) { + this.allowedGroups = allowedGroups; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java index 4d33ba35c5e8..5745ec3e8ce8 100644 --- a/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/integration/crosswalks/service/ItemExportFormatServiceImpl.java @@ -45,6 +45,7 @@ public ItemExportFormat get(Context context, String id) { public List getAll(Context context) { return this.streamDissiminatorCrosswalkMapper.getAllItemExportCrosswalks().entrySet().stream() + .filter(entry -> entry.getValue().isAuthorized(context)) .map(entry -> buildItemExportFormat(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); @@ -58,6 +59,7 @@ public List byEntityTypeAndMolteplicity(Context context, Strin .entrySet().stream() .filter(entry -> hasSameMolteplicity(entry.getValue(), molteplicity)) .filter(entry -> hasSameEntityType(entry.getValue(), entityType)) + .filter(entry -> entry.getValue().isAuthorized(context)) .map(entry -> buildItemExportFormat(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); diff --git a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java index 85a4fd140e9a..fa1cbc38beae 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java @@ -210,6 +210,8 @@ public InputStream retrieve(Context context, Bitstream bitstream) public Bitstream getBitstreamByName(Item item, String bundleName, String bitstreamName) throws SQLException; + List getBitstreamByBundleName(Item item, String bundleName) throws SQLException; + public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLException; public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException; diff --git a/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java new file mode 100644 index 000000000000..e824fef5a9b1 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/integration/crosswalks/ZipItemExportCrosswalkIT.java @@ -0,0 +1,256 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.integration.crosswalks; + +import static org.dspace.builder.CollectionBuilder.createCollection; +import static org.dspace.builder.CommunityBuilder.createCommunity; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; 
+import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; + +import org.apache.commons.collections.IteratorUtils; +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.utils.DSpace; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +public class ZipItemExportCrosswalkIT extends AbstractIntegrationTestWithDatabase { + + private ZipItemExportCrosswalk zipItemExportCrosswalk; + + private Community community; + + private Collection collection; + + @Before + public void setup() throws SQLException, AuthorizeException { + + zipItemExportCrosswalk = new DSpace().getServiceManager() + .getServicesByType(ZipItemExportCrosswalk.class).get(0); + + context.turnOffAuthorisationSystem(); + community = createCommunity(context).build(); + collection = createCollection(context, community).build(); + context.restoreAuthSystemState(); + + } + + @Test + public void testItemsExportWithAdmin() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item1 = createItem("Test Item 1", "2022-01-01", "Luca Giamminonni"); + Item item2 = createItem("Test Item 2", "2022-03-01", "Walter White"); + Item item3 = createItem("Test Item 3", "2020-01-01", "Andrea Bollini"); + + Bitstream bitstream1 = createBitstream(item1, "test.txt", "This is a test"); + Bitstream bitstream2 = createBitstream(item3, "test.pdf", "Last test", "6 months"); + + String expectedEmbargo = LocalDate.now().plus(6, ChronoUnit.MONTHS).format(DateTimeFormatter.ISO_DATE); + + context.restoreAuthSystemState(); + + context.setCurrentUser(admin); + + File tempZip = File.createTempFile("test", "zip"); + tempZip.deleteOnExit(); + + try (FileOutputStream fos = new FileOutputStream(tempZip)) { + zipItemExportCrosswalk.disseminate(context, List.of(item1, item2, item3).iterator(), fos); + } + + try (ZipFile zipFile = new ZipFile(tempZip)) { + + ZipEntry zipEntry = zipFile.getEntry(item1.getID().toString() + "/mets.xml"); + assertThat(zipEntry, notNullValue()); + + String metsContent = getZipEntryContent(zipFile, zipEntry); + + assertThat(metsContent, containsString( + "2022-01-01")); + assertThat(metsContent, + containsString("Test Item 1")); + assertThat(metsContent, containsString("Luca Giamminonni")); + assertThat(metsContent, + containsString("test@email.com")); + assertThat(metsContent, + containsString("test.txt")); + + zipEntry = zipFile.getEntry(item1.getID().toString() + "/bitstream_" + bitstream1.getID().toString()); + assertThat(zipEntry, notNullValue()); + assertThat(getZipEntryContent(zipFile, zipEntry), is("This is a test")); + + zipEntry = 
zipFile.getEntry(item2.getID().toString() + "/mets.xml"); + assertThat(zipEntry, notNullValue()); + + metsContent = getZipEntryContent(zipFile, zipEntry); + + assertThat(metsContent, containsString( + "2022-03-01")); + assertThat(metsContent, + containsString("Test Item 2")); + assertThat(metsContent, containsString("Walter White")); + assertThat(metsContent, + containsString("test@email.com")); + + zipEntry = zipFile.getEntry(item3.getID().toString() + "/mets.xml"); + assertThat(zipEntry, notNullValue()); + + metsContent = getZipEntryContent(zipFile, zipEntry); + + assertThat(metsContent, containsString( + "2020-01-01")); + assertThat(metsContent, + containsString("Test Item 3")); + assertThat(metsContent, containsString("Andrea Bollini")); + assertThat(metsContent, + containsString("test@email.com")); + assertThat(metsContent, containsString("")); + assertThat(metsContent, + containsString("test.pdf")); + + zipEntry = zipFile.getEntry(item3.getID().toString() + "/bitstream_" + bitstream2.getID().toString()); + assertThat(zipEntry, notNullValue()); + assertThat(getZipEntryContent(zipFile, zipEntry), is("Last test")); + + assertThat(getAllEntries(zipFile), hasSize(5)); + + } + + } + + @Test + public void testItemsExportWithCurators() throws Exception { + + context.turnOffAuthorisationSystem(); + + Group curators = GroupBuilder.createGroup(context) + .withName("Curators") + .build(); + + EPerson user = EPersonBuilder.createEPerson(context) + .withEmail("user@test.com") + .withGroupMembership(curators) + .build(); + + Item item1 = createItem("Test Item 1", "2022-01-01", "Luca Giamminonni"); + Item item2 = createItem("Test Item 2", "2022-03-01", "Walter White"); + Item item3 = createItem("Test Item 3", "2020-01-01", "Andrea Bollini"); + + context.restoreAuthSystemState(); + + context.setCurrentUser(user); + + File tempZip = File.createTempFile("test", "zip"); + tempZip.deleteOnExit(); + + try (FileOutputStream fos = new FileOutputStream(tempZip)) { + zipItemExportCrosswalk.disseminate(context, List.of(item1, item2, item3).iterator(), fos); + } + + try (ZipFile zipFile = new ZipFile(tempZip)) { + assertThat(getAllEntries(zipFile), hasSize(3)); + } + + } + + @Test + public void testItemsExportWithNotAuthorizedUser() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item1 = createItem("Test Item 1", "2022-01-01", "Luca Giamminonni"); + Item item2 = createItem("Test Item 2", "2022-03-01", "Walter White"); + Item item3 = createItem("Test Item 3", "2020-01-01", "Andrea Bollini"); + + context.restoreAuthSystemState(); + + context.setCurrentUser(eperson); + + File tempZip = File.createTempFile("test", "zip"); + tempZip.deleteOnExit(); + + try (FileOutputStream fos = new FileOutputStream(tempZip)) { + + AuthorizeException authorizeException = Assert.assertThrows(AuthorizeException.class, + () -> zipItemExportCrosswalk.disseminate(context, List.of(item1, item2, item3).iterator(), fos)); + + assertThat(authorizeException.getMessage(), + is("The current user is not allowed to perform a zip item export")); + } + + } + + private Item createItem(String title, String issueDate, String author) { + return ItemBuilder.createItem(context, collection) + .withTitle(title) + .withIssueDate(issueDate) + .withAuthor(author) + .build(); + } + + private Bitstream createBitstream(Item item, String name, String content) throws Exception { + return BitstreamBuilder.createBitstream(context, item, getInputStream(content)) + .withName(name) + .build(); + } + + private Bitstream createBitstream(Item item, 
String name, String content, String embargoPeriod) throws Exception { + return BitstreamBuilder.createBitstream(context, item, getInputStream(content)) + .withName(name) + .withEmbargoPeriod(embargoPeriod) + .build(); + } + + private String getZipEntryContent(ZipFile zipFile, ZipEntry zipEntry) throws IOException { + return IOUtils.toString(zipFile.getInputStream(zipEntry), StandardCharsets.UTF_8); + } + + private InputStream getInputStream(String str) { + return IOUtils.toInputStream(str, StandardCharsets.UTF_8); + } + + @SuppressWarnings("unchecked") + private List getAllEntries(ZipFile zipFile) { + return IteratorUtils.toList(zipFile.entries().asIterator()); + } + +} diff --git a/dspace/config/spring/api/crosswalks.xml b/dspace/config/spring/api/crosswalks.xml index 34941fe7b0d4..9184a56482da 100644 --- a/dspace/config/spring/api/crosswalks.xml +++ b/dspace/config/spring/api/crosswalks.xml @@ -68,6 +68,7 @@ + @@ -509,7 +510,22 @@ - + + + + + + + + + Administrator + Curators + + + + + + From 7e63d47718be812b8ba7566da85274f26b18bce3 Mon Sep 17 00:00:00 2001 From: Alexander K Date: Mon, 20 Nov 2023 15:44:11 +0100 Subject: [PATCH 624/686] [DSC-820] add bitstream.hide metadata --- dspace/config/submission-forms.xml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index 784b8ee301aa..c44ac175c1c7 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -37,6 +37,17 @@ + + + bitstream + hide + + dropdown + false + + + +
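The [DSC-737] changes above register ZipItemExportCrosswalk in dspace/config/spring/api/crosswalks.xml and, in the test configuration, restrict it to the Administrator and Curators groups via the allowedGroups property. A minimal usage sketch modeled on ZipItemExportCrosswalkIT follows; the ZipExportSketch class, its export method and the targetZip argument are illustrative assumptions, while the service lookup and the disseminate call mirror the test above.

    import java.io.File;
    import java.io.FileOutputStream;
    import java.util.List;

    import org.dspace.content.Item;
    import org.dspace.content.integration.crosswalks.ZipItemExportCrosswalk;
    import org.dspace.core.Context;
    import org.dspace.utils.DSpace;

    public class ZipExportSketch {

        // context.getCurrentUser() must belong to one of the configured allowedGroups
        // (or the list must be empty), otherwise disseminate() throws an AuthorizeException.
        public void export(Context context, List<Item> items, File targetZip) throws Exception {
            ZipItemExportCrosswalk crosswalk = new DSpace().getServiceManager()
                .getServicesByType(ZipItemExportCrosswalk.class).get(0);

            try (FileOutputStream out = new FileOutputStream(targetZip)) {
                // one zip folder per item (named by the item UUID), each holding the metadata
                // file produced by the nested crosswalk plus the bitstreams of the configured bundle
                crosswalk.disseminate(context, items.iterator(), out);
            }
        }
    }

With the test wiring, anonymous or non-member users hit the AuthorizeException path exercised by testItemsExportWithNotAuthorizedUser, while members of the Curators or Administrator groups receive the zip described above.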
    From afb9f8f240a4f9de08a572edef70b1616433be20 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Tue, 7 Nov 2023 19:44:01 +0100 Subject: [PATCH 625/686] [DSC-1350] fix name permutations to skip too long names on best match plugin --- .../src/main/java/org/dspace/util/PersonNameUtil.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java index 0e88a0a9cdf5..2751db759e91 100644 --- a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java +++ b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java @@ -109,10 +109,13 @@ private static List getAllNamePermutations(String name) { List namePermutations = new ArrayList(); - PermutationIterator permutationIterator = new PermutationIterator(List.of(name.split(" "))); + List names = List.of(name.split(" ")); + if (names.size() < 5) { + PermutationIterator permutationIterator = new PermutationIterator(names); - while (permutationIterator.hasNext()) { - namePermutations.add(String.join(" ", permutationIterator.next())); + while (permutationIterator.hasNext()) { + namePermutations.add(String.join(" ", permutationIterator.next())); + } } return namePermutations; From 59216dc425ff0ff295149b1aca12e5fec8110dea Mon Sep 17 00:00:00 2001 From: Nikita Krivonosov Date: Mon, 13 Nov 2023 11:02:12 +0100 Subject: [PATCH 626/686] [DSC-1350] - Add warning log --- .../SolrServiceBestMatchIndexingPlugin.java | 2 +- .../java/org/dspace/util/PersonNameUtil.java | 17 ++++++++++++----- .../org/dspace/util/PersonNameUtilTest.java | 18 +++++++++++------- 3 files changed, 24 insertions(+), 13 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java index 39130e9224d2..a1830a3931c7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceBestMatchIndexingPlugin.java @@ -68,7 +68,7 @@ protected void addIndexValueForPersonItem(Item item, SolrInputDocument document) String lastName = getMetadataValue(item, LASTNAME_FIELD); List fullNames = getMetadataValues(item, FULLNAME_FIELDS); - getAllNameVariants(firstName, lastName, fullNames) + getAllNameVariants(firstName, lastName, fullNames, item.getID().toString()) .forEach(variant -> addIndexValue(document, variant)); } diff --git a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java index 2751db759e91..cea02c76990b 100644 --- a/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java +++ b/dspace-api/src/main/java/org/dspace/util/PersonNameUtil.java @@ -16,6 +16,7 @@ import org.apache.commons.collections4.iterators.PermutationIterator; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; /** * Utility class that handle person names. 
@@ -24,6 +25,7 @@ * */ public final class PersonNameUtil { + private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PersonNameUtil.class); private PersonNameUtil() { @@ -35,12 +37,14 @@ private PersonNameUtil() { * @param firstName the first name * @param lastName the last name * @param fullNames the full names + * @param uuid the uuid * @return all the variants of the given names */ - public static Set getAllNameVariants(String firstName, String lastName, List fullNames) { + public static Set getAllNameVariants(String firstName, String lastName, List fullNames, + String uuid) { Set variants = new HashSet(); variants.addAll(getNameVariants(firstName, lastName)); - variants.addAll(getNameVariants(fullNames)); + variants.addAll(getNameVariants(fullNames, uuid)); return variants; } @@ -95,17 +99,17 @@ private static List getNameVariants(String[] firstNames, String lastName return variants; } - private static List getNameVariants(List fullNames) { + private static List getNameVariants(List fullNames, String uuid) { return fullNames.stream() .filter(Objects::nonNull) .map(name -> removeComma(name)) .distinct() - .flatMap(name -> getAllNamePermutations(name).stream()) + .flatMap(name -> getAllNamePermutations(name, uuid).stream()) .distinct() .collect(Collectors.toList()); } - private static List getAllNamePermutations(String name) { + private static List getAllNamePermutations(String name, String uuid) { List namePermutations = new ArrayList(); @@ -116,6 +120,9 @@ private static List getAllNamePermutations(String name) { while (permutationIterator.hasNext()) { namePermutations.add(String.join(" ", permutationIterator.next())); } + } else { + log.warn(String.format("Cannot retrieve variants on the Person with UUID %s because the name is too long", + uuid)); } return namePermutations; diff --git a/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java b/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java index fe80bf143756..c0c5a0c02194 100644 --- a/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java +++ b/dspace-api/src/test/java/org/dspace/util/PersonNameUtilTest.java @@ -27,7 +27,8 @@ public class PersonNameUtilTest { @Test public void testWithAllNames() { - Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, Luca", "Luke Giammo")); + Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, Luca", + "Luke Giammo"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "Giamminonni L.", "L. 
Giamminonni", "Giamminonni L", "L Giamminonni", "Luke Giammo", "Giammo Luke")); @@ -37,7 +38,7 @@ public void testWithAllNames() { public void testWithFirstNameComposedByTwoNames() { Set variants = getAllNameVariants("Luca Paolo", "Giamminonni", - List.of("Giamminonni, Luca", "Luke Giammo")); + List.of("Giamminonni, Luca", "Luke Giammo"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca Paolo", "Luca Paolo Giamminonni", "Giamminonni Luca", "Luca Giamminonni", "Giamminonni Paolo", "Paolo Giamminonni", @@ -51,7 +52,7 @@ public void testWithFirstNameComposedByTwoNames() { public void testWithFirstNameComposedByThreeNames() { Set variants = getAllNameVariants("Luca Paolo Claudio", "Giamminonni", - List.of("Giamminonni, Luca", "Luke Giammo")); + List.of("Giamminonni, Luca", "Luke Giammo"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca Paolo Claudio", "Luca Paolo Claudio Giamminonni", "Giamminonni Luca Claudio", "Luca Claudio Giamminonni", "Giamminonni Paolo Claudio", @@ -69,7 +70,8 @@ public void testWithFirstNameComposedByThreeNames() { @Test public void testWithoutFirstAndLastName() { - Set variants = getAllNameVariants(null, null, List.of("Giamminonni, Luca Fabio", "Luke Giammo")); + Set variants = getAllNameVariants(null, null, List.of("Giamminonni, Luca Fabio", "Luke Giammo"), + "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca Fabio", "Fabio Luca Giamminonni", "Giamminonni Fabio Luca", "Luca Fabio Giamminonni", "Luca Giamminonni Fabio", @@ -80,12 +82,13 @@ public void testWithoutFirstAndLastName() { @Test public void testWithAlreadyTruncatedName() { - Set variants = getAllNameVariants("L.", "Giamminonni", List.of("Giamminonni, Luca")); + Set variants = getAllNameVariants("L.", "Giamminonni", List.of("Giamminonni, Luca"), + "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "Giamminonni L.", "L. Giamminonni", "Giamminonni L", "L Giamminonni")); - variants = getAllNameVariants("L. P.", "Giamminonni", List.of("Giamminonni, Luca")); + variants = getAllNameVariants("L. P.", "Giamminonni", List.of("Giamminonni, Luca"), "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "L. Giamminonni", "Giamminonni L.", "P. Giamminonni", "Giamminonni P.", "Giamminonni L. P.", "L. P. Giamminonni", @@ -97,7 +100,8 @@ public void testWithAlreadyTruncatedName() { @Test public void testWithAlreadyTruncatedNameOnFullName() { - Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, L.")); + Set variants = getAllNameVariants("Luca", "Giamminonni", List.of("Giamminonni, L."), + "uuid"); assertThat(variants, containsInAnyOrder("Giamminonni Luca", "Luca Giamminonni", "Giamminonni L.", "L. 
Giamminonni", "Giamminonni L", "L Giamminonni")); From a3123a0f5ed9e3824b409e877c4572d825620394 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Thu, 30 Nov 2023 17:52:08 +0100 Subject: [PATCH 627/686] [maven-release-plugin] prepare for next development iteration --- dspace-api/pom.xml | 2 +- dspace-iiif/pom.xml | 2 +- dspace-oai/pom.xml | 2 +- dspace-rdf/pom.xml | 2 +- dspace-rest/pom.xml | 4 ++-- dspace-server-webapp/pom.xml | 2 +- dspace-services/pom.xml | 2 +- dspace-sword/pom.xml | 2 +- dspace-swordv2/pom.xml | 2 +- dspace/modules/additions/pom.xml | 2 +- dspace/modules/pom.xml | 2 +- dspace/modules/rest/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- dspace/pom.xml | 2 +- pom.xml | 32 ++++++++++++++++---------------- 15 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index aae4b282b654..530c30d52c83 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index 08c2fe062265..a3de08e3d9cf 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index f3b7e23bf4e5..f2489890d233 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index f5acab4dfc70..d2160ac9ccf5 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index e1ade345ba72..f521f08a1411 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index a2add8447d8b..5cc96e573686 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 2caba151f403..49f16408277d 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index 716361a5406c..bdd807db8185 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 8393d311b4ca..2fa0056ba7b3 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 9dae87e503ba..a8e149a66036 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. 
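Looking back at the PersonNameUtil change earlier in this series (DSC-1350): the new guard skips permutation generation once a name has five or more tokens, because PermutationIterator yields n! orderings. A standalone sketch of that guard, with an illustrative class name:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.commons.collections4.iterators.PermutationIterator;

    class NamePermutationSketch {

        // Four tokens produce 4! = 24 permutations, but ten tokens would produce 3,628,800,
        // so names with five or more tokens are skipped (the patch also logs the item UUID).
        static List<String> allNamePermutations(String name) {
            List<String> permutations = new ArrayList<>();
            List<String> tokens = List.of(name.split(" "));
            if (tokens.size() < 5) {
                PermutationIterator<String> iterator = new PermutationIterator<>(tokens);
                while (iterator.hasNext()) {
                    permutations.add(String.join(" ", iterator.next()));
                }
            }
            return permutations;
        }
    }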
diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index 6e9daa881871..1ee925c3a30f 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT ../../pom.xml diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index fa3083bda8d7..cbcd4a53d86b 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index eeb283d96ea2..1f1409094b56 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -13,7 +13,7 @@ just adding new jar in the classloader modules org.dspace - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT .. diff --git a/dspace/pom.xml b/dspace/pom.xml index 8e9226149995..5192a9c1c8e9 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -16,7 +16,7 @@ org.dspace dspace-parent - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index a06395b3c0b3..4c3a4635dd33 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT DSpace Parent Project DSpace-CRIS is an open source extension of DSpace (http://www.dspace.org) providing out of box support for the CRIS / RIMS and moder Institution Repository use cases with advanced features and optimized configurations @@ -958,14 +958,14 @@ org.dspace dspace-rest - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT jar classes org.dspace dspace-rest - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT war @@ -1116,69 +1116,69 @@ org.dspace dspace-api - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-api test-jar - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT test org.dspace.modules additions - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-sword - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-swordv2 - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-oai - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-services - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-server-webapp test-jar - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT test org.dspace dspace-rdf - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-iiif - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT org.dspace dspace-server-webapp - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT jar classes org.dspace dspace-server-webapp - cris-2023.02.00 + cris-2023.02.01-SNAPSHOT war @@ -2024,7 +2024,7 @@ scm:git:git@github.com:4Science/DSpace.git scm:git:git@github.com:4Science/DSpace.git git@github.com:4Science/DSpace.git - dspace-cris-2023.02.00 + dspace-cris-2022.02.00 From 8cf6288470407fb9086afb105bf00eb661d3cf90 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Wed, 27 Dec 2023 17:37:34 +0100 Subject: [PATCH 628/686] [maven-release-plugin] prepare release dspace-cris-2023.02.01 --- dspace-api/pom.xml | 2 +- dspace-iiif/pom.xml | 2 +- dspace-oai/pom.xml | 2 +- dspace-rdf/pom.xml | 2 +- dspace-rest/pom.xml | 4 ++-- dspace-server-webapp/pom.xml | 2 +- dspace-services/pom.xml | 2 +- dspace-sword/pom.xml | 2 +- dspace-swordv2/pom.xml | 2 +- dspace/modules/additions/pom.xml | 2 +- dspace/modules/pom.xml | 2 +- dspace/modules/rest/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- dspace/pom.xml | 2 +- pom.xml | 32 ++++++++++++++++---------------- 15 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dspace-api/pom.xml 
b/dspace-api/pom.xml index 530c30d52c83..9a9f8423cbca 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index a3de08e3d9cf..bdbc87b6d9e0 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index f2489890d233..91e284a70823 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index d2160ac9ccf5..87119a7c7942 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index f521f08a1411..8116f48d4e51 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index 5cc96e573686..0f431238a0d7 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 49f16408277d..94fac3ffa4b8 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index bdd807db8185..8bf4957f0563 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 2fa0056ba7b3..7c1902f87a46 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index a8e149a66036..d1fb6830e27e 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index 1ee925c3a30f..67840b9cc8bf 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 ../../pom.xml diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index cbcd4a53d86b..3dda1eb77bc2 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index 1f1409094b56..b3168d9f66f0 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -13,7 +13,7 @@ just adding new jar in the classloader modules org.dspace - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 .. 
diff --git a/dspace/pom.xml b/dspace/pom.xml index 5192a9c1c8e9..3d6ea9789a93 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -16,7 +16,7 @@ org.dspace dspace-parent - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 ../pom.xml diff --git a/pom.xml b/pom.xml index 4c3a4635dd33..bd1b50c52eee 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 DSpace Parent Project DSpace-CRIS is an open source extension of DSpace (http://www.dspace.org) providing out of box support for the CRIS / RIMS and moder Institution Repository use cases with advanced features and optimized configurations @@ -958,14 +958,14 @@ org.dspace dspace-rest - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 jar classes org.dspace dspace-rest - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 war @@ -1116,69 +1116,69 @@ org.dspace dspace-api - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 org.dspace dspace-api test-jar - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 test org.dspace.modules additions - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 org.dspace dspace-sword - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 org.dspace dspace-swordv2 - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 org.dspace dspace-oai - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 org.dspace dspace-services - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 org.dspace dspace-server-webapp test-jar - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 test org.dspace dspace-rdf - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 org.dspace dspace-iiif - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 org.dspace dspace-server-webapp - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 jar classes org.dspace dspace-server-webapp - cris-2023.02.01-SNAPSHOT + cris-2023.02.01 war @@ -2024,7 +2024,7 @@ scm:git:git@github.com:4Science/DSpace.git scm:git:git@github.com:4Science/DSpace.git git@github.com:4Science/DSpace.git - dspace-cris-2022.02.00 + dspace-cris-2023.02.01 From ebc6b8e0442f3fe359d7c716caf32ad32e8136dd Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Wed, 3 Jan 2024 21:21:56 +0100 Subject: [PATCH 629/686] DSC-1461 Allow administrators to always perform PUT/Patches operations over items (inprogress or archived) --- .../impl/EditMetadataFeature.java | 4 +- .../rest/repository/ItemRestRepository.java | 13 +++-- .../dspace/app/rest/ItemRestRepositoryIT.java | 52 ++++++++++++++----- 3 files changed, 51 insertions(+), 18 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java index 820de57b7246..80a052224fb8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java @@ -60,12 +60,12 @@ public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLEx ) { String defaultGroupUUID = configurationService.getProperty("edit.metadata.allowed-group"); if (StringUtils.isBlank(defaultGroupUUID)) { - return authorizeServiceRestUtil.authorizeActionBoolean(context, object,DSpaceRestPermission.WRITE); + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE); } Group defaultGroup = StringUtils.isNotBlank(defaultGroupUUID) ? 
groupService.find(context, UUID.fromString(defaultGroupUUID)) : null; if (Objects.nonNull(defaultGroup) && groupService.isMember(context, defaultGroup)) { - return authorizeServiceRestUtil.authorizeActionBoolean(context, object,DSpaceRestPermission.WRITE); + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE); } } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ItemRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ItemRestRepository.java index b64da66af9cf..8c7e89565371 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ItemRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ItemRestRepository.java @@ -40,6 +40,7 @@ import org.dspace.app.rest.model.patch.Patch; import org.dspace.app.rest.repository.handler.service.UriListHandlerService; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Item; @@ -106,6 +107,9 @@ public class ItemRestRepository extends DSpaceObjectRestRepository findAll(Context context, Pageable pageable) { protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id, Patch patch) throws AuthorizeException, SQLException { Item item = itemService.find(context, id); - if (!editMetadataFeature.isAuthorized(context, converter.toRest(item, utils.obtainProjection()))) { + if (!authorizeService.isAdmin(context) && + !editMetadataFeature.isAuthorized(context, converter.toRest(item, utils.obtainProjection()))) { throw new AccessDeniedException("Current user not authorized for this operation"); } patchDSpaceObject(apiCategory, model, id, patch); @@ -350,13 +355,13 @@ protected ItemRest put(Context context, HttpServletRequest request, String apiCa } catch (IOException e1) { throw new UnprocessableEntityException("Error parsing request body", e1); } - if (!editMetadataFeature.isAuthorized(context, itemRest)) { - throw new AccessDeniedException("Current user not authorized for this operation"); - } Item item = itemService.find(context, uuid); if (item == null) { throw new ResourceNotFoundException(apiCategory + "." 
+ model + " with id: " + uuid + " not found"); } + if (!authorizeService.isAdmin(context) && !editMetadataFeature.isAuthorized(context, itemRest)) { + throw new AccessDeniedException("Current user not authorized for this operation"); + } if (StringUtils.equals(uuid.toString(), itemRest.getId())) { metadataConverter.setMetadata(context, item, itemRest.getMetadata()); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index 29caf8def121..7e0edf08bac9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -4552,8 +4552,8 @@ public void putItemMetadataWithUserNotPartOfGroupConfigured() throws Exception { itemRest.setInArchive(true); itemRest.setDiscoverable(true); itemRest.setWithdrawn(false); - String token = getAuthToken(admin.getEmail(), password); - MvcResult mvcResult = getClient(token).perform(post("/api/core/items?owningCollection=" + + String adminToken = getAuthToken(admin.getEmail(), password); + MvcResult mvcResult = getClient(adminToken).perform(post("/api/core/items?owningCollection=" + col1.getID().toString()) .content(mapper.writeValueAsBytes(itemRest)) .contentType(contentType)) @@ -4569,12 +4569,25 @@ public void putItemMetadataWithUserNotPartOfGroupConfigured() throws Exception { itemRest.setHandle(itemHandleString); Group group = GroupBuilder.createGroup(context).build(); configurationService.setProperty("edit.metadata.allowed-group", group.getID()); + // add write rights to the user + ResourcePolicyBuilder.createResourcePolicy(context) + .withUser(eperson) + .withAction(WRITE) + .withDspaceObject(itemService.find(context, UUID.fromString(itemUuidString))) + .build(); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); // expect forbidden, the user is not part of the group set in property {{edit.metadata.allowed-group}} getClient(token).perform(put("/api/core/items/" + itemUuidString) .content(mapper.writeValueAsBytes(itemRest)) .contentType(contentType)) .andExpect(status().isForbidden()); + // admins should still be able to use put + getClient(adminToken).perform(put("/api/core/items/" + itemUuidString) + .content(mapper.writeValueAsBytes(itemRest)) + .contentType(contentType)) + .andExpect(status().isOk()); } finally { ItemBuilder.deleteItem(UUID.fromString(itemUuidString)); } @@ -4635,7 +4648,7 @@ public void putItemMetadataWithUserPartOfGroupConfigured() throws Exception { context.restoreAuthSystemState(); token = getAuthToken(eperson.getEmail(), password); configurationService.setProperty("edit.metadata.allowed-group", group.getID()); - // expect forbidden, the user is not part of the group set in property {{edit.metadata.allowed-group}} + // expect ok, the user is part of the group set in property {{edit.metadata.allowed-group}} getClient(token).perform(put("/api/core/items/" + itemUuidString) .content(mapper.writeValueAsBytes(itemRest)) .contentType(contentType)) @@ -4907,7 +4920,7 @@ public void findVersionForItemWithoutVersionsWithVersioningDisabledTest() throws public void patchItemMetadataWithUserPartOfGroupConfigured() throws Exception { context.turnOffAuthorisationSystem(); // add admin person as member to the group - Group group = GroupBuilder.createGroup(context).addMember(admin).build(); + Group group = GroupBuilder.createGroup(context).addMember(eperson).build(); 
groupService.update(context, group); context.commit(); // ** GIVEN ** @@ -4930,15 +4943,19 @@ public void patchItemMetadataWithUserPartOfGroupConfigured() throws Exception { .build(); // add write permission to the user admin ResourcePolicyBuilder.createResourcePolicy(context) - .withUser(admin) + .withUser(eperson) .withAction(WRITE) .withDspaceObject(itemService.find(context, item.getID())) .build(); context.restoreAuthSystemState(); configurationService.setProperty("edit.metadata.allowed-group", group.getID()); - String token = getAuthToken(admin.getEmail(), password); + String token = getAuthToken(eperson.getEmail(), password); List ops = new ArrayList(); - ReplaceOperation replaceOperation = new ReplaceOperation("/withdrawn", true); + List> titleValue = new ArrayList<>(); + Map value = new HashMap(); + value.put("value", "New title"); + titleValue.add(value); + ReplaceOperation replaceOperation = new ReplaceOperation("/metadata/dc.title", titleValue); ops.add(replaceOperation); String patchBody = getPatchContent(ops); // withdraw item @@ -4948,8 +4965,7 @@ public void patchItemMetadataWithUserPartOfGroupConfigured() throws Exception { .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isOk()) .andExpect(jsonPath("$.uuid", Matchers.is(item.getID().toString()))) - .andExpect(jsonPath("$.withdrawn", Matchers.is(true))) - .andExpect(jsonPath("$.inArchive", Matchers.is(false))); + .andExpect(jsonPath("$.metadata['dc.title'][0].value", Matchers.is("New title"))); } @Test @@ -4974,7 +4990,7 @@ public void patchItemMetadataWithUserNotPartOfGroupConfigured() throws Exception .build(); // add write rights to the user admin ResourcePolicyBuilder.createResourcePolicy(context) - .withUser(admin) + .withUser(eperson) .withAction(WRITE) .withDspaceObject(itemService.find(context, item.getID())) .build(); @@ -4984,9 +5000,13 @@ public void patchItemMetadataWithUserNotPartOfGroupConfigured() throws Exception context.commit(); context.restoreAuthSystemState(); configurationService.setProperty("edit.metadata.allowed-group", group.getID()); - String token = getAuthToken(admin.getEmail(), password); + String token = getAuthToken(eperson.getEmail(), password); List ops = new ArrayList(); - ReplaceOperation replaceOperation = new ReplaceOperation("/withdrawn", true); + List> titleValue = new ArrayList<>(); + Map value = new HashMap(); + value.put("value", "New title"); + titleValue.add(value); + ReplaceOperation replaceOperation = new ReplaceOperation("/metadata/dc.title", titleValue); ops.add(replaceOperation); String patchBody = getPatchContent(ops); // withdraw item @@ -4995,6 +5015,14 @@ public void patchItemMetadataWithUserNotPartOfGroupConfigured() throws Exception .content(patchBody) .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().isForbidden()); + token = getAuthToken(admin.getEmail(), password); + //expect ok as admin + getClient(token).perform(patch("/api/core/items/" + item.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.uuid", Matchers.is(item.getID().toString()))) + .andExpect(jsonPath("$.metadata['dc.title'][0].value", Matchers.is("New title"))); } @Test From 1d64bb3d1f57e104e075b4fb301d000be1da4435 Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Wed, 3 Jan 2024 16:58:35 +0200 Subject: [PATCH 630/686] [DSC-1418] extend createDisambiguatedOrganization to traverse orgunit hierarchy --- .../factory/impl/OrcidAffiliationFactory.java | 18 +++++---- 
.../impl/OrcidCommonObjectFactoryImpl.java | 38 ++++++++++++++++--- 2 files changed, 43 insertions(+), 13 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java index d74f05bcaf50..5b325a44a37a 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidAffiliationFactory.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -56,9 +57,9 @@ public OrcidAffiliationFactory(OrcidProfileSectionType sectionType, OrcidProfile @Override public List getMetadataFields() { - return List.of(organizationField, roleField, startDateField, endDateField).stream() - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + return Stream.of(organizationField, roleField, startDateField, endDateField) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); } @Override @@ -80,14 +81,17 @@ public Object create(Context context, List metadataValues) { orcidCommonObjectFactory.createFuzzyDate(endDate).ifPresent(affiliation::setEndDate); affiliation.setRoleTitle(isUnprocessableValue(role) ? null : role.getValue()); - orcidCommonObjectFactory.createOrganization(context, organization).ifPresent(affiliation::setOrganization); + orcidCommonObjectFactory.createOrganization(context, organization).ifPresent(org -> { + affiliation.setOrganization(org); + affiliation.setDepartmentName(org.getName()); + }); return affiliation; } @Override public List getMetadataSignatures(Context context, Item item) { - List signatures = new ArrayList(); + List signatures = new ArrayList<>(); Map> metadataGroups = getMetadataGroups(item); int groupSize = metadataGroups.getOrDefault(organizationField, Collections.emptyList()).size(); @@ -95,7 +99,7 @@ public List getMetadataSignatures(Context context, Item item) { List metadataValues = getMetadataValueByPlace(metadataGroups, currentGroupIndex); //only "visible" metadatavalues within this group metadataValues = metadataValues.stream() - .filter(metadataValue -> isAllowedMetadataByVisibility(metadataValue)) + .filter(this::isAllowedMetadataByVisibility) .collect(Collectors.toList()); if (!metadataValues.isEmpty()) { signatures.add(metadataSignatureGenerator.generate(context, metadataValues)); @@ -168,7 +172,7 @@ private Map> getMetadataGroups(Item item) { } private List getMetadataValueByPlace(Map> metadataGroups, int place) { - List metadataValues = new ArrayList(); + List metadataValues = new ArrayList<>(); for (String metadataField : metadataGroups.keySet()) { List nestedMetadataValues = metadataGroups.get(metadataField); if (nestedMetadataValues.size() > place) { diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java index 48cda3b3da20..8d92e72b80c0 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java @@ -121,7 +121,9 @@ public Optional createOrganization(Context context, MetadataValue Item organizationItem = 
findRelatedItem(context, metadataValue); if (organizationItem != null) { organization.setAddress(createOrganizationAddress(organizationItem)); - organization.setDisambiguatedOrganization(createDisambiguatedOrganization(organizationItem)); + organization.setDisambiguatedOrganization( + createDisambiguatedOrganization(context, organizationItem) + ); } return of(organization); @@ -156,7 +158,7 @@ public Optional createFundingContributor(Context context, Me FundingContributor contributor = new FundingContributor(); contributor.setCreditName(new CreditName(metadataValue.getValue())); - contributor.setContributorAttributes(getFundingContributorAttributes(metadataValue, role)); + contributor.setContributorAttributes(getFundingContributorAttributes(role)); Item authorItem = findItem(context, UUIDUtils.fromString(metadataValue.getAuthority())); if (authorItem != null) { @@ -190,7 +192,7 @@ public Optional createCountry(Context context, MetadataValue metadataVa throw new OrcidValidationException(OrcidValidationError.INVALID_COUNTRY); } - return country.map(isoCountry -> new Country(isoCountry)); + return country.map(Country::new); } private ContributorAttributes getContributorAttributes(MetadataValue metadataValue, ContributorRole role) { @@ -211,8 +213,7 @@ private OrganizationAddress createOrganizationAddress(Item organizationItem) { return address; } - private FundingContributorAttributes getFundingContributorAttributes(MetadataValue metadataValue, - FundingContributorRole role) { + private FundingContributorAttributes getFundingContributorAttributes(FundingContributorRole role) { FundingContributorAttributes attributes = new FundingContributorAttributes(); attributes.setContributorRole(role != null ? role.value() : null); return attributes; @@ -237,11 +238,23 @@ private DisambiguatedOrganization createDisambiguatedOrganization(Item organizat return null; } + private DisambiguatedOrganization createDisambiguatedOrganization(Context context, Item organizationItem) { + DisambiguatedOrganization disambiguatedOrganization = createDisambiguatedOrganization(organizationItem); + Item parentOrganization = findParentOrganization(context, organizationItem); + + while (disambiguatedOrganization == null && parentOrganization != null) { + disambiguatedOrganization = createDisambiguatedOrganization(parentOrganization); + parentOrganization = findParentOrganization(context, parentOrganization); + } + + return disambiguatedOrganization; + } + private Optional convertToIso3166Country(String countryValue) { return ofNullable(countryValue) .map(value -> countryConverter != null ? countryConverter.getValue(value) : value) .filter(value -> isValidEnum(Iso3166Country.class, value)) - .map(value -> Iso3166Country.fromValue(value)); + .map(Iso3166Country::fromValue); } private boolean isUnprocessableValue(MetadataValue value) { @@ -249,6 +262,19 @@ private boolean isUnprocessableValue(MetadataValue value) { || value.getValue().equals(PLACEHOLDER_PARENT_METADATA_VALUE); } + private Item findParentOrganization(Context context, Item item) { + try { + Optional metadataValue = + itemService.getMetadataByMetadataString(item, "organization.parentOrganization") + .stream().findFirst(); + return metadataValue.isPresent() + ? 
itemService.find(context, UUIDUtils.fromString(metadataValue.get().getAuthority())) + : null; + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + private Item findRelatedItem(Context context, MetadataValue metadataValue) { try { return itemService.find(context, UUIDUtils.fromString(metadataValue.getAuthority())); From 1446d3b279ec47f6ba8e70c38c8fc307a63071be Mon Sep 17 00:00:00 2001 From: "yevhenii.lohatskyi" Date: Wed, 3 Jan 2024 16:59:38 +0200 Subject: [PATCH 631/686] [DSC-1418] add test for disambiguation from orgunit hierarchy --- .../OrcidProfileSectionFactoryServiceIT.java | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java index 4d2e85a5b99e..b5880d7014b9 100644 --- a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java @@ -242,6 +242,63 @@ public void testFullEmploymentCreation() { } + @Test + public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreation() { + + context.turnOffAuthorisationSystem(); + + Item orgUnitWithRinId = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science with rin") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withOrgUnitRinggoldIdentifier("12345") + .build(); + + Item orgUnit = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withParentOrganization("4Science with rin", orgUnitWithRinId.getID().toString()) + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonAffiliation("4Science", orgUnit.getID().toString()) + .withPersonAffiliationStartDate("2020-02") + .withPersonAffiliationEndDate(PLACEHOLDER_PARENT_METADATA_VALUE) + .withPersonAffiliationRole("Researcher") + .build(); + + context.restoreAuthSystemState(); + + List values = new ArrayList<>(); + values.add(getMetadata(item, "oairecerif.person.affiliation", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.startDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.endDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.role", 0)); + + Object firstOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, AFFILIATION); + assertThat(firstOrcidObject, instanceOf(Employment.class)); + Employment qualification = (Employment) firstOrcidObject; + assertThat(qualification.getStartDate(), notNullValue()); + assertThat(qualification.getStartDate().getYear().getValue(), is("2020")); + assertThat(qualification.getStartDate().getMonth().getValue(), is("02")); + assertThat(qualification.getStartDate().getDay().getValue(), is("01")); + assertThat(qualification.getEndDate(), nullValue()); + assertThat(qualification.getRoleTitle(), is("Researcher")); + assertThat(qualification.getDepartmentName(), is("4Science")); + + Organization organization = qualification.getOrganization(); + assertThat(organization, notNullValue()); + assertThat(organization.getName(), is("4Science")); + assertThat(organization.getAddress(), notNullValue()); + assertThat(organization.getAddress().getCountry(), is(Iso3166Country.IT)); + assertThat(organization.getAddress().getCity(), is("Milan")); + assertThat(organization.getDisambiguatedOrganization(), 
notNullValue()); + assertThat(organization.getDisambiguatedOrganization().getDisambiguatedOrganizationIdentifier(), is("12345")); + assertThat(organization.getDisambiguatedOrganization().getDisambiguationSource(), is("RINGGOLD")); + } + @Test public void testQualificationCreation() { context.turnOffAuthorisationSystem(); From 080d3eb559351ca82fa7245a1e340110e64acc36 Mon Sep 17 00:00:00 2001 From: Daniele Ninfo Date: Fri, 5 Jan 2024 16:03:20 +0100 Subject: [PATCH 632/686] [DSC-1418] Add new test cases --- .../OrcidProfileSectionFactoryServiceIT.java | 121 ++++++++++++++++++ 1 file changed, 121 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java index b5880d7014b9..60122ba5dbde 100644 --- a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java @@ -298,6 +298,127 @@ public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreation() { assertThat(organization.getDisambiguatedOrganization().getDisambiguatedOrganizationIdentifier(), is("12345")); assertThat(organization.getDisambiguatedOrganization().getDisambiguationSource(), is("RINGGOLD")); } + + @Test + public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreationWithAncestor() { + + context.turnOffAuthorisationSystem(); + + Item orgUnitGranfather = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science with rin") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withOrgUnitRinggoldIdentifier("12345") + .build(); + + Item orgUnitFather = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science without rin") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withParentOrganization("4Science with rin", orgUnitGranfather.getID().toString()) + .build(); + + + Item orgUnit = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withParentOrganization("4Science without rin", orgUnitFather.getID().toString()) + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonAffiliation("4Science", orgUnit.getID().toString()) + .withPersonAffiliationStartDate("2020-02") + .withPersonAffiliationEndDate(PLACEHOLDER_PARENT_METADATA_VALUE) + .withPersonAffiliationRole("Researcher") + .build(); + + context.restoreAuthSystemState(); + + List values = new ArrayList<>(); + values.add(getMetadata(item, "oairecerif.person.affiliation", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.startDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.endDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.role", 0)); + + Object firstOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, AFFILIATION); + assertThat(firstOrcidObject, instanceOf(Employment.class)); + Employment qualification = (Employment) firstOrcidObject; + assertThat(qualification.getStartDate(), notNullValue()); + assertThat(qualification.getStartDate().getYear().getValue(), is("2020")); + assertThat(qualification.getStartDate().getMonth().getValue(), is("02")); + assertThat(qualification.getStartDate().getDay().getValue(), is("01")); + assertThat(qualification.getEndDate(), nullValue()); + assertThat(qualification.getRoleTitle(), is("Researcher")); + 
assertThat(qualification.getDepartmentName(), is("4Science")); + + Organization organization = qualification.getOrganization(); + assertThat(organization, notNullValue()); + assertThat(organization.getName(), is("4Science")); + assertThat(organization.getAddress(), notNullValue()); + assertThat(organization.getAddress().getCountry(), is(Iso3166Country.IT)); + assertThat(organization.getAddress().getCity(), is("Milan")); + assertThat(organization.getDisambiguatedOrganization(), notNullValue()); + assertThat(organization.getDisambiguatedOrganization().getDisambiguatedOrganizationIdentifier(), is("12345")); + assertThat(organization.getDisambiguatedOrganization().getDisambiguationSource(), is("RINGGOLD")); + } + + @Test + public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreationWithNoId() { + + context.turnOffAuthorisationSystem(); + + Item orgUnitWithRinId = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science with rin") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .build(); + + Item orgUnit = ItemBuilder.createItem(context, orgUnits) + .withTitle("4Science") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withParentOrganization("4Science with rin", orgUnitWithRinId.getID().toString()) + .build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonAffiliation("4Science", orgUnit.getID().toString()) + .withPersonAffiliationStartDate("2020-02") + .withPersonAffiliationEndDate(PLACEHOLDER_PARENT_METADATA_VALUE) + .withPersonAffiliationRole("Researcher") + .build(); + + context.restoreAuthSystemState(); + + List values = new ArrayList<>(); + values.add(getMetadata(item, "oairecerif.person.affiliation", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.startDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.endDate", 0)); + values.add(getMetadata(item, "oairecerif.affiliation.role", 0)); + + Object firstOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, AFFILIATION); + assertThat(firstOrcidObject, instanceOf(Employment.class)); + Employment qualification = (Employment) firstOrcidObject; + assertThat(qualification.getStartDate(), notNullValue()); + assertThat(qualification.getStartDate().getYear().getValue(), is("2020")); + assertThat(qualification.getStartDate().getMonth().getValue(), is("02")); + assertThat(qualification.getStartDate().getDay().getValue(), is("01")); + assertThat(qualification.getEndDate(), nullValue()); + assertThat(qualification.getRoleTitle(), is("Researcher")); + assertThat(qualification.getDepartmentName(), is("4Science")); + + Organization organization = qualification.getOrganization(); + assertThat(organization, notNullValue()); + assertThat(organization.getName(), is("4Science")); + assertThat(organization.getAddress(), notNullValue()); + assertThat(organization.getAddress().getCountry(), is(Iso3166Country.IT)); + assertThat(organization.getAddress().getCity(), is("Milan")); + assertThat(organization.getDisambiguatedOrganization(), nullValue()); + } + + @Test public void testQualificationCreation() { From 4edc8e101f6fab31a5cae7f233b1f44f70fd62d3 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Sat, 6 Jan 2024 14:47:56 +0100 Subject: [PATCH 633/686] Move pom.xml to the next maintenance version --- dspace-api/pom.xml | 2 +- dspace-iiif/pom.xml | 2 +- dspace-oai/pom.xml | 2 +- dspace-rdf/pom.xml | 2 +- dspace-rest/pom.xml | 4 ++-- dspace-server-webapp/pom.xml | 2 +- dspace-services/pom.xml | 2 +- 
dspace-sword/pom.xml | 2 +- dspace-swordv2/pom.xml | 2 +- dspace/modules/additions/pom.xml | 2 +- dspace/modules/pom.xml | 2 +- dspace/modules/rest/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- dspace/pom.xml | 2 +- pom.xml | 32 ++++++++++++++++---------------- 15 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index 9a9f8423cbca..1c6879a5d701 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index bdbc87b6d9e0..3b56ba53e832 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 91e284a70823..76718f44ba3c 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index 87119a7c7942..eb63a67e4579 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index 8116f48d4e51..257d0b3a91f8 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index 0f431238a0d7..c94bfaeed6ae 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 94fac3ffa4b8..867ac1dc1a7e 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index 8bf4957f0563..10a44f3615f6 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 7c1902f87a46..e76bfca65b9f 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index d1fb6830e27e..7de65e9ca49e 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. 
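Returning to the DSC-1418 changes earlier in this series: when an OrgUnit carries no disambiguation identifier of its own, the factory now walks up organization.parentOrganization until an ancestor provides one. The sketch below condenses that traversal; the method name and the Ringgold metadata field name are assumptions, not taken from the patch, and an acyclic hierarchy is assumed.

    import java.sql.SQLException;
    import java.util.Optional;

    import org.dspace.content.Item;
    import org.dspace.content.MetadataValue;
    import org.dspace.content.service.ItemService;
    import org.dspace.core.Context;
    import org.dspace.util.UUIDUtils;

    class DisambiguationTraversalSketch {

        // Walks up the OrgUnit hierarchy via organization.parentOrganization, as the patched
        // OrcidCommonObjectFactoryImpl does, until an ancestor carries a Ringgold identifier.
        Optional<String> findAncestorRinggoldId(Context context, ItemService itemService, Item orgUnit)
            throws SQLException {
            Item current = orgUnit;
            while (current != null) {
                Optional<MetadataValue> ringgold = itemService
                    .getMetadataByMetadataString(current, "organization.identifier.rin")
                    .stream().findFirst();
                if (ringgold.isPresent()) {
                    return Optional.of(ringgold.get().getValue());
                }
                Optional<MetadataValue> parent = itemService
                    .getMetadataByMetadataString(current, "organization.parentOrganization")
                    .stream().findFirst();
                current = parent.isPresent()
                    ? itemService.find(context, UUIDUtils.fromString(parent.get().getAuthority()))
                    : null;
            }
            return Optional.empty();
        }
    }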
diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index 67840b9cc8bf..af44a7efc2e7 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT ../../pom.xml diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index 3dda1eb77bc2..4dfa2939bf90 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index b3168d9f66f0..e8a714dfd25e 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -13,7 +13,7 @@ just adding new jar in the classloader modules org.dspace - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT .. diff --git a/dspace/pom.xml b/dspace/pom.xml index 3d6ea9789a93..9d21239d1034 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -16,7 +16,7 @@ org.dspace dspace-parent - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index bd1b50c52eee..7cfaf08e0011 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT DSpace Parent Project DSpace-CRIS is an open source extension of DSpace (http://www.dspace.org) providing out of box support for the CRIS / RIMS and moder Institution Repository use cases with advanced features and optimized configurations @@ -958,14 +958,14 @@ org.dspace dspace-rest - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT jar classes org.dspace dspace-rest - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT war @@ -1116,69 +1116,69 @@ org.dspace dspace-api - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT org.dspace dspace-api test-jar - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT test org.dspace.modules additions - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT org.dspace dspace-sword - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT org.dspace dspace-swordv2 - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT org.dspace dspace-oai - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT org.dspace dspace-services - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT org.dspace dspace-server-webapp test-jar - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT test org.dspace dspace-rdf - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT org.dspace dspace-iiif - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT org.dspace dspace-server-webapp - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT jar classes org.dspace dspace-server-webapp - cris-2023.02.01 + cris-2023.02.02-SNAPSHOT war @@ -2024,7 +2024,7 @@ scm:git:git@github.com:4Science/DSpace.git scm:git:git@github.com:4Science/DSpace.git git@github.com:4Science/DSpace.git - dspace-cris-2023.02.01 + dspace-cris-2023.02.02-SNAPSHOT From ba9ee87c5aa218c29e758830fe193ec0b6d3b01a Mon Sep 17 00:00:00 2001 From: "aliaksei.bykau" Date: Sun, 7 Jan 2024 15:29:09 +0100 Subject: [PATCH 634/686] [DSC-1458] now ItemEnhancer avoid to update item when it is not strictly necessary --- .../dspace/content/enhancer/ItemEnhancer.java | 2 + .../impl/RelatedEntityItemEnhancer.java | 66 +++++++++++++++++-- .../service/impl/ItemEnhancerServiceImpl.java | 21 ++++-- 3 files changed, 77 insertions(+), 12 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java index f0e3d9d15649..a3784146c5ac 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java +++ 
b/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java @@ -47,4 +47,6 @@ public interface ItemEnhancer { * @param item the item to enhance */ void enhance(Context context, Item item); + + boolean needUpdate(Context context, Item item); } diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java index a6c97cc84e65..1eb85afe817c 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java @@ -59,22 +59,78 @@ public boolean canEnhance(Context context, Item item) { @Override public void enhance(Context context, Item item) { try { - cleanObsoleteVirtualFields(context, item); - updateVirtualFieldsPlaces(context, item); - performEnhancement(context, item); + boolean isMetadataDeleted = cleanObsoleteVirtualFields(context, item); + if (isMetadataDeleted) { + updateVirtualFieldsPlaces(context, item); + } + if (needUpdate(context, item)) { + performEnhancement(context, item); + } } catch (SQLException e) { LOGGER.error("An error occurs enhancing item with id {}: {}", item.getID(), e.getMessage(), e); throw new SQLRuntimeException(e); } } - private void cleanObsoleteVirtualFields(Context context, Item item) throws SQLException { + @Override + public boolean needUpdate(Context context, Item item) { + List metadataValuesToDelete = getObsoleteVirtualFields(item); + boolean isNeedUpdateMetadata = false; + + if (!noEnhanceableMetadata(context, item)) { + for (MetadataValue metadataValue : getEnhanceableMetadataValue(item)) { + if (wasValueAlreadyUsedForEnhancement(item, metadataValue)) { + continue; + } + + Item relatedItem = findRelatedEntityItem(context, metadataValue); + if (relatedItem == null) { + isNeedUpdateMetadata = true; + break; + } + + List relatedItemMetadataValues = + getMetadataValues(relatedItem, relatedItemMetadataField); + if (relatedItemMetadataValues.isEmpty()) { + isNeedUpdateMetadata = true; + break; + } + for (MetadataValue relatedItemMetadataValue : relatedItemMetadataValues) { + if (!isContainingMetadata(item, relatedItemMetadataValue.getValue())) { + isNeedUpdateMetadata = true; + break; + } + } + + } + } + + return !metadataValuesToDelete.isEmpty() || isNeedUpdateMetadata; + } + + private boolean isContainingMetadata(Item item, String value) { + return itemService.getMetadata(item, VIRTUAL_METADATA_SCHEMA, VIRTUAL_METADATA_ELEMENT, + getVirtualQualifier(), null, true).stream() + .anyMatch(metadataValue -> metadataValue.getValue().equals(value)); + } + + + /** + * Clean obsolete virtual fields. 
+ * + * @param context the DSpace Context + * @param item the item to check + * @return true if some metadata is deleted, false if no metadata was deleted + */ + private boolean cleanObsoleteVirtualFields(Context context, Item item) throws SQLException { List metadataValuesToDelete = getObsoleteVirtualFields(item); if (!metadataValuesToDelete.isEmpty()) { itemService.removeMetadataValues(context, item, metadataValuesToDelete); + return true; + } else { + return false; } - } private void updateVirtualFieldsPlaces(Context context, Item item) { diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java index 90f9181a5e37..bc040eb1f7b9 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java @@ -42,13 +42,20 @@ public class ItemEnhancerServiceImpl implements ItemEnhancerService { @Override public void enhance(Context context, Item item) { + boolean isUpdateNeeded = false; + + for (ItemEnhancer itemEnhancer : itemEnhancers) { + if (itemEnhancer.canEnhance(context, item)) { + if (itemEnhancer.needUpdate(context,item)) { + itemEnhancer.enhance(context, item); + isUpdateNeeded = true; + } + } + } - itemEnhancers.stream() - .filter(itemEnhancer -> itemEnhancer.canEnhance(context, item)) - .forEach(itemEnhancer -> itemEnhancer.enhance(context, item)); - - updateItem(context, item); - + if (isUpdateNeeded) { + updateItem(context, item); + } } @Override @@ -68,7 +75,7 @@ private void cleanUpVirtualFields(Context context, Item item) { } try { - itemService.removeMetadataValues(context, item, ListUtils.union(virtualFields, virtualSourceFields)); + itemService.removeMetadataValues(context, item, metadataValuesToRemove); } catch (SQLException e) { throw new SQLRuntimeException(e); } From 38d851a2cb886d2bfea43eee6a56861f96639c5a Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 28 Dec 2023 16:22:48 +0100 Subject: [PATCH 635/686] [DSC-963] Fixes testResources generation --- dspace-server-webapp/pom.xml | 68 +++++++++++++++++++++++++++++++++--- 1 file changed, 64 insertions(+), 4 deletions(-) diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index 3a755eaff937..1f21df10c623 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -28,6 +28,66 @@ + + org.codehaus.mojo + properties-maven-plugin + 1.1.0 + + + initialize + + read-project-properties + + + + ${root.basedir}/dspace/config/dspace.cfg + ${root.basedir}/dspace/config/local.cfg + + true + + + + + + maven-resources-plugin + + + testEnvironment + process-resources + + testResources + + + + + ${basedir}/src/test/resources + true + + + + + + webappFiltering + process-resources + + resources + + + + + ${basedir}/src/main/resources + + **/*.properties + **/static/** + **/spring/** + + true + + + + + + org.apache.maven.plugins maven-jar-plugin @@ -333,7 +393,7 @@ com.flipkart.zjsonpatch zjsonpatch - 0.4.6 + 0.4.14 @@ -353,7 +413,7 @@ org.webjars.bowergithub.jquery jquery-dist - 3.6.0 + 3.7.0 @@ -367,7 +427,7 @@ org.webjars.bowergithub.medialize uri.js - 1.19.10 + 1.19.11 @@ -590,7 +650,7 @@ org.exparity hamcrest-date - 2.0.7 + 2.0.8 test From 9b86cb750a61efd71ad7a0dc1465f24d74f8a28b Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 28 Dec 2023 17:09:57 +0100 Subject: [PATCH 636/686] [DSC-963] Fixes server 
unpacking --- dspace/modules/server/pom.xml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index b61a3515195e..a91491a89f3a 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -12,6 +12,7 @@ + cris-2023.02.00 ${basedir}/../../.. @@ -67,6 +68,26 @@ + + org.apache.maven.plugins + maven-dependency-plugin + + + unpack + prepare-package + + unpack-dependencies + + + runtime + org.dspace + dspace-server-webapp + **/static/**,**/*.properties + ${project.build.directory}/additions + + + + ${basedir}/../../.. From 4b0d938c5df314f535801d0e174ebf23be287546 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Mon, 8 Jan 2024 16:38:50 +0100 Subject: [PATCH 641/686] [DSC-963] Fixes Checkstyle issues --- .../service/OrcidProfileSectionFactoryServiceIT.java | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java index 60122ba5dbde..fa0b47133a7b 100644 --- a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java @@ -298,7 +298,7 @@ public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreation() { assertThat(organization.getDisambiguatedOrganization().getDisambiguatedOrganizationIdentifier(), is("12345")); assertThat(organization.getDisambiguatedOrganization().getDisambiguationSource(), is("RINGGOLD")); } - + @Test public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreationWithAncestor() { @@ -310,14 +310,13 @@ public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreationWithAncest .withOrgUnitLocality("Milan") .withOrgUnitRinggoldIdentifier("12345") .build(); - + Item orgUnitFather = ItemBuilder.createItem(context, orgUnits) .withTitle("4Science without rin") .withOrgUnitCountry("IT") .withOrgUnitLocality("Milan") .withParentOrganization("4Science with rin", orgUnitGranfather.getID().toString()) .build(); - Item orgUnit = ItemBuilder.createItem(context, orgUnits) .withTitle("4Science") @@ -363,7 +362,7 @@ public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreationWithAncest assertThat(organization.getDisambiguatedOrganization().getDisambiguatedOrganizationIdentifier(), is("12345")); assertThat(organization.getDisambiguatedOrganization().getDisambiguationSource(), is("RINGGOLD")); } - + @Test public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreationWithNoId() { @@ -417,8 +416,7 @@ public void testDisambiguationFromOrgUnitHierarchyOnEmploymentCreationWithNoId() assertThat(organization.getAddress().getCity(), is("Milan")); assertThat(organization.getDisambiguatedOrganization(), nullValue()); } - - + @Test public void testQualificationCreation() { From 2315b9acf4f8596435b9ec9e732f9bcd4fb36b5c Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Mon, 8 Jan 2024 18:40:17 +0100 Subject: [PATCH 642/686] [DSC-1438] Fix tests --- .../dspaceFolder/config/submission-forms.xml | 3642 ++++++++--------- .../SubmissionDefinitionsControllerIT.java | 12 +- .../app/rest/SubmissionFormsControllerIT.java | 26 +- 3 files changed, 1822 insertions(+), 1858 deletions(-) diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 
cc41ac0c6e18..15c3574951d9 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -4,275 +4,688 @@ - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + - + + + + dc + title + + false + + onebox + Enter the name of the file. + You must enter a main title for this item. + + + + + dc + description + true + + textarea + Enter a description for the file + + + + + +
    + + + + isAuthorOfPublication + person + true + true + + Add an author + + dc + contributor + author + name + + orcid,my_staff_db + + + + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + + dc + title + alternative + true + + onebox + If the item has any alternative titles, please enter them here. + + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + You must enter at least the year. + + + + dc + publisher + + false + + + onebox + Enter the name of the publisher of the previously issued instance of this item. + + + + + + dc + identifier + citation + false + + onebox + Enter the standard citation for the previously issued instance of this item. + + + + + + dc + relation + ispartofseries + true + + Technical Report + series + Enter the series and number assigned to this item by your community. + + + + + + dc + identifier + + + true + + qualdrop_value + If the item has any identification numbers or codes associated with +it, please enter the types and the actual numbers or codes. + + + + + + dc + type + + true + + dropdown + Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key. + + + + + + dc + language + iso + false + + dropdown + Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. + + + + + +
    dc - title - + contributor + author + + name false - - onebox - Enter the name of the file. - You must enter a main title for this item. + You must enter at least the author. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. - dc - description - true - - textarea - Enter a description for the file - + person + affiliation + name + + onebox + false + + Enter the affiliation of the author as stated on the publication. -
    - - - - isAuthorOfPublication - person - true - true - - Add an author - - dc - contributor - author - name - - orcid,my_staff_db - - - + - dc - title - - false - + orgunit + identifier + name + onebox - Enter the main title of the item. - You must enter a main title for this item. - + false + You must enter at least the Orgunit name. + Enter the name of the Orgunit of this item - dc - title - alternative - true - + orgunit + identifier + id + onebox - If the item has any alternative titles, please enter them here. - - - - - - dc - date - issued - false - - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - - You must enter at least the year. - - - - dc - publisher - false - - - onebox - Enter the name of the publisher of the previously issued instance of this item. - + + Enter the id of the Orgunit of this item + + +
    + + + dc + contributor + author + true + + group + Enter the names of the authors of this item. + + + + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + + dc + title + alternative + true + + onebox + If the item has any alternative titles, please enter them here. + + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + You must enter at least the year. + + + + dc + publisher + + false + + + onebox + Enter the name of the publisher of the previously issued instance of this item. + + + + + + dc + identifier + citation + false + + onebox + Enter the standard citation for the previously issued instance of this item. + + + + + + dc + relation + ispartofseries + true + + series + Enter the series and number assigned to this item by your community. + + + + + + dc + identifier + + + true + + qualdrop_value + If the item has any identification numbers or codes associated with +it, please enter the types and the actual numbers or codes. + + + + + + dc + type + + true + + dropdown + Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key. + + + + + + dc + language + iso + false + + dropdown + Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. + + + + + + orgunit + identifier + name + true + + inline-group + Enter the names of the orgunit of this item. + + + + +
    + + + dc + subject + + + true + + twobox + Enter appropriate subject keywords or phrases. + + srsc + + + + + dc + description + abstract + false + + textarea + Enter the abstract of the item. + + + + + + dc + description + sponsorship + false + + textarea + Enter the names of any sponsors and/or funding codes in the box. + + + + + + dc + description + + false + + textarea + Enter any other description or comments in this box. + + + + +
    + + + dc + subject + + + true + + twobox + Enter appropriate subject keywords or phrases. + + srsc + + + + + dc + description + abstract + false + + textarea + Enter the abstract of the item. + + + + + + dc + description + sponsorship + false + + textarea + Enter the names of any sponsors and/or funding codes in the box. + + + + + + dc + description + + false + + textarea + Enter any other description or comments in this box. + + + + +
    + + + dc + subject + + + true + + twobox + Enter appropriate subject keywords or phrases. + + srsc + + + + + dc + description + abstract + false + + textarea + Enter the abstract of the item. + + + + + + dc + description + sponsorship + false + + textarea + Enter the names of any sponsors and/or funding codes in the box. + + + + + + dc + description + + false + + textarea + Enter any other description or comments in this box. + + + + +
    + + + isJournalOfVolume + periodical + creativework.publisher:somepublishername + + Select the journal related to this volume. + + + + + + dc + title + + onebox + Enter the name of the journal volume + + + + + publicationVolume + volumeNumber + + onebox + Enter the volume of the journal volume + + + + + creativework + datePublished + + date + Enter the issue date of the journal volume + + + + + dc + description + + textarea + Enter the description of the journal volume + + + + + +
    dc - identifier - citation - false - + contributor + author + onebox - Enter the standard citation for the previously issued instance of this item. - - - - - - dc - relation - ispartofseries - true - - Technical Report - series - Enter the series and number assigned to this item by your community. - - - - - - dc - identifier - - - true - - qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - - - - - - - dc - type - - true - - dropdown - Select the type(s) of content of the item. To select more than one value in the list, you may - have to hold down the "CTRL" or "Shift" key. - - + false + You must enter at least the author. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. - dc - language - iso + oairecerif + author + affiliation + + onebox false - - dropdown - Select the language of the main content of the item. If the language does not appear in the - list, please select 'Other'. If the content does not really have a language (for example, if it - is a dataset or an image) please select 'N/A'. - - + + Enter the affiliation of the author as stated on the publication. - -
    + dc contributor - author - - name + editor + + onebox false You must enter at least the author. - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh - or Smith, J]. - + The editors of this publication. - person - affiliation - name + oairecerif + editor + affiliation onebox false - - Enter the affiliation of the author as stated on the publication. + + Enter the affiliation of the editor as stated on the publication. - -
    + - orgunit - identifier - name - + dc + relation + project + onebox false - You must enter at least the Orgunit name. - Enter the name of the Orgunit of this item + You must enter at least the project name. + Enter the name of project, if any, that has supported this publication - orgunit - identifier - id - + dc + relation + grantno + onebox false - - Enter the id of the Orgunit of this item + + - -
    + dc - contributor - author + identifier + + qualdrop_value true - - group - Enter the names of the authors of this item. - + + If the item has any identification numbers or codes associated with +it, please enter the types and the actual numbers or codes. dc title - - false onebox - Enter the main title of the item. + false You must enter a main title for this item. - + Enter the main title of the item. @@ -280,11 +693,11 @@ dc title alternative - true onebox + true + If the item has any alternative titles, please enter them here. - @@ -292,123 +705,74 @@ dc date issued - false - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - - You must enter at least the year. - - - - dc - publisher - - false - - - onebox - Enter the name of the publisher of the previously issued instance of this item. - - - - - - dc - identifier - citation false - - onebox - Enter the standard citation for the previously issued instance of this item. - + You must enter at least the year. + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. dc - relation - ispartofseries + contributor + author + + group true - - series - Enter the series and number assigned to this item by your community. - + + Enter the names of the authors of this item. dc - identifier - - + contributor + editor + + group true - - qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - - + + The editors of this publication. dc type - - true - dropdown - Select the type(s) of content of the item. To select more than one value in the list, you may - have to hold down the "CTRL" or "Shift" key. - - + onebox + false + You must select a publication type + Select the type of content of the item. + types + +
    dc language iso - false dropdown - Select the language of the main content of the item. If the language does not appear in the - list, please select 'Other'. If the content does not really have a language (for example, if it - is a dataset or an image) please select 'N/A'. - - - - - - - orgunit - identifier - name - true - - inline-group - Enter the names of the orgunit of this item. - + false + + Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. - -
    dc subject - - - true - twobox + tag + true + Enter appropriate subject keywords or phrases. - - srsc @@ -416,1657 +780,1036 @@ dc description abstract - false textarea + false + Enter the abstract of the item. - + +
    dc - description - sponsorship + relation + publication + + onebox false - - textarea - Enter the names of any sponsors and/or funding codes in the box. - + + The publication where this publication is included. E.g. a book chapter lists here the book, a contribution to a conference lists here the conference proceeding. + book part dc - description - + relation + isbn + + onebox false - - textarea - Enter any other description or comments in this box. - - - - -
    - - - dc - subject - - - true - - twobox - Enter appropriate subject keywords or phrases. - - srsc + + The ISBN of the book/report if it was not found in the system + book part dc - description - abstract + relation + doi + + onebox false - - textarea - Enter the abstract of the item. - + + The DOI of the book/report if it was not found in the system + book part dc - description - sponsorship + relation + ispartof + + onebox false - - textarea - Enter the names of any sponsors and/or funding codes in the box. - + + The journal or Serie where this publication has been published dc - description - - false - - textarea - Enter any other description or comments in this box. - + relation + ispartofseries + + series + true + + Enter the series and number assigned to this item by your community. - -
    dc - subject - - - true - - twobox - Enter appropriate subject keywords or phrases. - - srsc + relation + issn + + onebox + false + + The journal or Serie ISSN if it was not found in the system dc - description - abstract + coverage + publication + + onebox false - - textarea - Enter the abstract of the item. - + + The publication object of the review + review,book review dc - description - sponsorship + coverage + isbn + + onebox false - - textarea - Enter the names of any sponsors and/or funding codes in the box. - + + The ISBN of the reviewed item if it was not found in the system + review,book review dc - description - + coverage + doi + + onebox false - - textarea - Enter any other description or comments in this box. - + + The DOI of the reviewed item if it was not found in the system + review,book review - -
    - - - isJournalOfVolume - periodical - creativework.publisher:somepublishername - - Select the journal related to this volume. - - - dc - title - + description + sponsorship + onebox - Enter the name of the journal volume + true + + Enter the name of any sponsors. - publicationVolume - volumeNumber + dc + description + volume onebox - Enter the volume of the journal volume - - - - - creativework - datePublished - - date - Enter the issue date of the journal volume + false + + If applicable, the volume of the publishing channel where this publication appeared dc description - - textarea - Enter the description of the journal volume + issue + + onebox + false + + If applicable, the issue of the publishing channel where this publication appeared - - - -
    dc - contributor - author - + description + startpage + onebox false - You must enter at least the author. - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh - or Smith, J]. - + + If applicable, the page where this publication starts - oairecerif - author - affiliation - + dc + description + endpage + onebox false - - Enter the affiliation of the author as stated on the publication. + + If applicable, the page where this publication ends -
    + dc - contributor - editor - - onebox - false - You must enter at least the author. - The editors of this publication. + relation + project + + group + true + + Enter the name of project, if any, that has produced this publication. - oairecerif - editor - affiliation - + dc + relation + conference + onebox - false - - Enter the affiliation of the editor as stated on the publication. + true + + Enter the name of the conference where the item has been presented, if any. - -
    dc relation - project - + product + onebox - false - You must enter at least the project name. - Enter the name of project, if any, that has supported this publication + true + + Link the item to one or more existent dataset in the repository used or described by the publication or, put here the dataset citation dc - relation - grantno - + identifier + citation + onebox false - - + + Enter the standard citation for the previously issued instance of this item. - -
    dc - identifier - - qualdrop_value - true - - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - + description + + textarea + false + + Enter any other description or comments in this box. dc - title - + description + sponsorship + onebox - false - You must enter a main title for this item. - Enter the main title of the item. + true + + Enter the name of any sponsors. + + + +
    + + + dc + title + + onebox + false + You must specify a title for the patent + The title of the patent + + + + + dcterms + dateAccepted + + date + false + + The Approval date. + You can leave out the day and/or month if they aren't applicable. + + + + + dc + date + issued + + date + false + + The registration date of the patent. + You can leave out the day and/or month if they aren't applicable. + + + + + dc + contributor + author + + name + true + + The inventor: The actual devisor of an invention that is the subject of a patent. + + + + + dcterms + rightsHolder + + onebox + true + + The holders of this patent + + + + + dc + publisher + + onebox + true + + The issuer of the patent: the patent office + + + + + dc + identifier + patentno + + onebox + false + + The patent number. + + + + + dc + identifier + patentnumber + + onebox + false + + The patent number. + + + + + dc + type + + dropdown + false + You must select a patent type + Select the type of content of the patent. + + + + + dc + identifier + applicationnumber + + onebox + false + + The Application number. + + + + + dc + date + filled + + date + false + + The date Filled. + + + +
    + + + dc + language + iso + + dropdown + false + + Select the country and its language. + + + + + dc + subject + + onebox + true + + Enter appropriate subject keywords or phrases. + + + + + dc + description + abstract + + textarea + false + + Enter the description of the patent. + + + +
    + + + dc + relation + + onebox + true + + Enter the name of project, if any, that has produced this patent. + + + + + dc + relation + patent + + onebox + true + + Patents that precede (i.e., have priority over) this patent + + + + + dc + relation + references + + onebox + true + + Result outputs that are referenced by this patent + + + + +
    dc title - alternative - - onebox - true - - If the item has any alternative titles, please enter them here. - - - - - dc - date - issued - - date - false - You must enter at least the year. - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - - - - - - dc - contributor - author - - group - true - - Enter the names of the authors of this item. - - - - - dc - contributor - editor - - group - true - - The editors of this publication. - - - - - dc - type - - onebox - false - You must select a publication type - Select the type of content of the item. - types - - - -
    - - - dc - language - iso - - dropdown - false - - Select the language of the main content of the item. If the language does not appear in the - list, please select 'Other'. If the content does not really have a language (for example, if it - is a dataset or an image) please select 'N/A'. - - - - - - dc - subject - - tag - true - - Enter appropriate subject keywords or phrases. - - - - - dc - description - abstract - - textarea - false - - Enter the abstract of the item. - - - -
    - - - dc - relation - publication - - onebox - false - - The publication where this publication is included. E.g. a book chapter lists here the book, a - contribution to a conference lists here the conference proceeding. - - book part - - - - - dc - relation - isbn - - onebox - false - - The ISBN of the book/report if it was not found in the system - book part - - - - - dc - relation - doi - - onebox - false - - The DOI of the book/report if it was not found in the system - book part - - - - - dc - relation - ispartof - - onebox - false - - The journal or Serie where this publication has been published - - - - - dc - relation - ispartofseries - - series - true - - Enter the series and number assigned to this item by your community. - - - - - dc - relation - issn - - onebox - false - - The journal or Serie ISSN if it was not found in the system - - - - - dc - coverage - publication - - onebox - false - - The publication object of the review - review,book review - - - - - dc - coverage - isbn - - onebox - false - - The ISBN of the reviewed item if it was not found in the system - review,book review - - - - - dc - coverage - doi - - onebox - false - - The DOI of the reviewed item if it was not found in the system - review,book review - - - - - dc - description - sponsorship - - onebox - true - - Enter the name of any sponsors. - - - - - dc - description - volume - - onebox - false - - If applicable, the volume of the publishing channel where this publication appeared - - - - - dc - description - issue - - onebox - false - - If applicable, the issue of the publishing channel where this publication appeared - - - - - dc - description - startpage - - onebox - false - - If applicable, the page where this publication starts - - - - - dc - description - endpage - - onebox - false - - If applicable, the page where this publication ends - - - -
    - - - dc - relation - project - - group - true - - Enter the name of project, if any, that has produced this publication. - - - - - dc - relation - conference - - onebox - true - - Enter the name of the conference where the item has been presented, if any. - - - - - dc - relation - product - - onebox - true - - Link the item to one or more existent dataset in the repository used or described by the - publication or, put here the dataset citation - - - - - - dc - identifier - citation - - onebox - false - - Enter the standard citation for the previously issued instance of this item. - - - - - dc - description - - textarea - false - - Enter any other description or comments in this box. - - - - - dc - description - sponsorship - - onebox - true - - Enter the name of any sponsors. - - - - - -
    - - - dc - title - - onebox - false - You must specify a title for the patent - The title of the patent - - - - - dcterms - dateAccepted - - date - false - - The Approval date. - You can leave out the day and/or month if they aren't applicable. - - - - - - dc - date - issued - - date - false - - The registration date of the patent. - You can leave out the day and/or month if they aren't applicable. - - - - - - dc - contributor - author - - name - true - - The inventor: The actual devisor of an invention that is the subject of a patent. - - - - - dcterms - rightsHolder - - onebox - true - - The holders of this patent - - - - - dc - publisher - - onebox - true - - The issuer of the patent: the patent office - - - - - dc - identifier - patentno - - onebox - false - - The patent number. - - - - - dc - identifier - patentnumber - - onebox - false - - The patent number. - - - - - dc - type - - dropdown - false - You must select a patent type - Select the type of content of the patent. - - - - - dc - identifier - applicationnumber - - onebox - false - - The Application number. - - - - - dc - date - filled - - date - false - - The date Filled. - - - -
    - - - dc - language - iso - - dropdown - false - - Select the country and its language. - - - - - dc - subject - - onebox - true - - Enter appropriate subject keywords or phrases. - - - - - dc - description - abstract - - textarea - false - - Enter the description of the patent. - - - -
    - - - dc - relation - - onebox - true - - Enter the name of project, if any, that has produced this patent. - - - - - dc - relation - patent - - onebox - true - - Patents that precede (i.e., have priority over) this patent - - - - - dc - relation - references - - onebox - true - - Result outputs that are referenced by this patent - - - - -
    - - - dc - title - - name - false - You must enter least at the Surname. - - - - - - crisrp - name - - name - false - - - - - - - crisrp - name - translated - - name - false - - - - - - - crisrp - name - variant - - name - true - - - - - - - person - givenName - - onebox - false - - - - - person - familyName - - onebox - false - - - - - - - person - birthDate - - date - false - - - - - oairecerif - person - gender - - dropdown - false - - - - - - - person - jobTitle - - onebox - false - - - - - person - affiliation - name - - onebox + + name false - - + You must enter least at the Surname. + crisrp - workgroup - - onebox - true - - - - - - - oairecerif - identifier - url - - group - true - - - - - - - person - email - - onebox - false - - - - - - - dc - subject - - tag - true - - - - - - - person - identifier - orcid - - onebox - false - - - - - - - person - identifier - scopus-author-id - - onebox - true - - - - - - - person - identifier - rid - - onebox - true - - - - - - - oairecerif - person - affiliation - - group - true - - - - - - - dc - description - abstract - - textarea + name + + name false - - - - - - - crisrp - education - - group - true - - + + crisrp - country - - onebox + name + translated + + name false - - + + crisrp - qualification - - group + name + variant + + name true - - + + person - knowsLanguage - - dropdown - true - - - - - - - cris - policy - eperson - - onebox - false - - - - - - - cris - policy - group - - onebox - false - - - - - -
    - - - oairecerif - affiliation - role - false - - - onebox - - - - - oairecerif - person - affiliation - false - - - onebox - - You must enter at least the organisation of your affiliation. - - - oairecerif - affiliation - startDate - false - - - date - - - - - oairecerif - affiliation - endDate - false - - - date - - - - - -
    - - - crisrp - qualification - + givenName + onebox false - You must enter at least the qualification title. - - - - - - crisrp - qualification - start - - date - false - - - - - - - crisrp - qualification - end - - date - false - - - - - -
    - - - crisrp - education - + + + + + person + familyName + onebox false - You must enter at least the education title. - + + - crisrp - education - start - + person + birthDate + date false - - + + - crisrp - education - end - - date + oairecerif + person + gender + + dropdown false - - + + - crisrp - education - role - + person + jobTitle + onebox false - - + + - - -
    - - oairecerif - identifier - url - + person + affiliation + name + onebox false - You must enter at least the site url. - + + crisrp - site - title - + workgroup + onebox - false - - + true + + - - -
    - dc + oairecerif identifier - + url + + group true - - qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - - please give an identifier + + - - -
    - dc - title - - false - + person + email + onebox - Enter the main title of the item. - You must enter a main title for this item. - + false + + dc - date - issued - false - - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. - - You must enter at least the year. + subject + + tag + true + + - dc - type - - true - - dropdown - Select the type(s) of content of the item. To select more than one value in the list, you may - have to hold down the "CTRL" or "Shift" key. - - + person + identifier + orcid + + onebox + false + + - - dc + person identifier - isbn - true - - Book + scopus-author-id + onebox - Enter the ISBN of the book. - An ISBN is required. + true + + - + + - dc + person identifier - isbn - true - - Book chapter + rid + onebox - Enter the ISBN of the book in which this chapter appears. - + true + + - - -
    - dc - contributor - author - - name - false - You must enter at least the author. - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh - or Smith, J]. - + oairecerif + person + affiliation + + group + true + + - person - affiliation - name - - onebox + dc + description + abstract + + textarea false - - Enter the affiliation of the author as stated on the publication. + + - dc - language - iso - false - - dropdown - Select the language of the item. - - + crisrp + education + + group + true + + - dc - type - + crisrp + country + onebox false - You must select a publication type - Select the type(s) of content of the item. - publication-coar-types + + - - -
    - dc - contributor - author + crisrp + qualification + + group true - - onebox - Author field that can be associated with an authority providing suggestion - + + - dc - contributor - editor - false - - name - Editor field that can be associated with an authority providing the special name lookup - + person + knowsLanguage + + dropdown + true + + - dc - subject - true - + cris + policy + eperson + onebox - Subject field that can be associated with an authority providing lookup - + false + + - - -
    - dc - title - - false - + cris + policy + group + onebox - Enter the main title of the item. - You must enter a main title for this item. - + false + + + + +
    + + + oairecerif + affiliation + role + false + + + onebox + + + + + oairecerif + person + affiliation + false + + + onebox + + You must enter at least the organisation of your affiliation. + + + oairecerif + affiliation + startDate + false + + + date + + + + + oairecerif + affiliation + endDate + false + + + date + + + + + +
    + - dc - subject - true - + crisrp + qualification + onebox - Subject field that can be associated with an authority providing lookup - + false + You must enter at least the qualification title. + + + - dc - description - abstract + crisrp + qualification + start + + date false - - textarea - Enter the abstract of the item. - + + - dc - date - issued - false - - + crisrp + qualification + end + date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't applicable. - - You must enter at least the year. + false + + -
    + - dc - title - - false - + crisrp + education + onebox - Enter the main title of the item. - You must enter a main title for this item. - + false + You must enter at least the education title. + - - -
    - dc - title - + crisrp + education + start + + date false - - onebox + + - dc - date - issued - false - + crisrp + education + end + date - submission - submission - - - dc - type - true - - onebox - submission - - - dc - language - iso - true - - onebox - all + false + + + + - dc - contributor - author - true - + crisrp + education + role + onebox - workflow + false + + + + +
    + - dc - contributor - editor - true - + oairecerif + identifier + url + onebox - submission - all + false + You must enter at least the site url. + + + - dc - subject - true - + crisrp + site + title + onebox - workflow - workflow - - - dc - description false - - onebox - workflow - submission + + -
    + + + + dc + identifier + + true + + qualdrop_value + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + please give an identifier + + + + +
    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + You must enter at least the year. + + + + + dc + type + + true + + dropdown + Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key. + + + + + + + dc + identifier + isbn + true + + Book + onebox + Enter the ISBN of the book. + An ISBN is required. + + + + dc + identifier + isbn + true + + Book chapter + onebox + Enter the ISBN of the book in which this chapter appears. + + + + + +
    dc - title - + contributor + author + + name false - - onebox - Enter the main title of the item. - You must enter a main title for this item. + You must enter at least the author. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. - dc - date - issued + person + affiliation + name + + onebox false - - - date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't applicable. - - You must enter at least the year. + + Enter the affiliation of the author as stated on the publication. + + + dc + language + iso + false + + dropdown + Select the language of the item. + + + + dc @@ -2081,32 +1824,253 @@ -
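These test form definitions are what the REST layer serves from /api/config/submissionforms, the endpoint exercised by SubmissionDefinitionsControllerIT and SubmissionFormsControllerIT later in this patch. As a rough sketch of how one definition from this file surfaces over REST, a test method inside SubmissionFormsControllerIT (which already provides getClient, getAuthToken and the MockMvc static imports) could fetch a single form by name. The form name traditionalpageone and the rows[].fields[].input.type / mandatory property names are recalled from the DSpace 7 submission-forms contract rather than taken from this diff, so verify them against the actual test class before relying on them.

    String token = getAuthToken(admin.getEmail(), password);

    getClient(token).perform(get("/api/config/submissionforms/traditionalpageone"))
                    .andExpect(status().isOk())
                    .andExpect(content().contentType(contentType))
                    // the form id mirrors the form-definition name used in this XML file
                    .andExpect(jsonPath("$.id", is("traditionalpageone")))
                    // each row/field pair of the definition becomes an entry under rows[].fields[]
                    .andExpect(jsonPath("$.rows[0].fields[0].input.type", is("onebox")))
                    .andExpect(jsonPath("$.rows[0].fields[0].mandatory", is(true)));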
    - + + - dc - title - - false - - onebox - Field required + dc + contributor + author + true + + onebox + Author field that can be associated with an authority providing suggestion + - + + + + dc + contributor + editor + false + + name + Editor field that can be associated with an authority providing the special name lookup + + + + + + dc + subject + true + + onebox + Subject field that can be associated with an authority providing lookup + + + + + +
    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + dc + subject + true + + onebox + Subject field that can be associated with an authority providing lookup + + + + dc + description + abstract + false + + textarea + Enter the abstract of the item. + + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't applicable. + + You must enter at least the year. + + + +
    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + +
    + + + dc + title + + false + + onebox + + + dc + date + issued + false + + date + submission + submission + + + dc + type + true + + onebox + submission + + + dc + language + iso + true + + onebox + all + + + dc + contributor + author + true + + onebox + workflow + + + dc + contributor + editor + true + + onebox + submission + all + + + dc + subject + true + + onebox + workflow + workflow + + + dc + description + false + + onebox + workflow + submission + + + + +
    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't applicable. + You must enter at least the year. + + + + + dc + type + + onebox + false + You must select a publication type + Select the type(s) of content of the item. + publication-coar-types + + + + +
    + + + dc + title + + false + + onebox + Field required + +
    - - - dc - type - - false - - onebox - Field required - - + + + dc + type + + false + + onebox + Field required + +
    @@ -2396,36 +2360,36 @@
    - + - - - - - - - - - - - - + + + + + + + + + + + + - - - - DOI - doi - - - Scopus ID - scopus - - - WOS ID - isi - + + + + DOI + doi + + + Scopus ID + scopus + + + WOS ID + isi + Ads Code adsbibcode @@ -2438,172 +2402,172 @@ arXiv ID arxiv - - ISSN - issn - - - Other - other - - - ISMN - ismn - - - Gov't Doc # - govdoc - - - URI - uri - - - ISBN - isbn - - + + ISSN + issn + + + Other + other + + + ISMN + ismn + + + Gov't Doc # + govdoc + + + URI + uri + + + ISBN + isbn + + - - - Animation - Animation - - - Article - Article - - - Book - Book - - - Book chapter - Book chapter - - - Dataset - Dataset - - - Learning Object - Learning Object - - - Image - Image - - - Image, 3-D - Image, 3-D - - - Map - Map - - - Musical Score - Musical Score - - - Plan or blueprint - Plan or blueprint - - - Preprint - Preprint - - - Presentation - Presentation - - - Recording, acoustical - Recording, acoustical - - - Recording, musical - Recording, musical - - - Recording, oral - Recording, oral - - - Software - Software - - - Technical Report - Technical Report - - - Thesis - Thesis - - - Video - Video - - - Working Paper - Working Paper - - - Other - Other - - + + + Animation + Animation + + + Article + Article + + + Book + Book + + + Book chapter + Book chapter + + + Dataset + Dataset + + + Learning Object + Learning Object + + + Image + Image + + + Image, 3-D + Image, 3-D + + + Map + Map + + + Musical Score + Musical Score + + + Plan or blueprint + Plan or blueprint + + + Preprint + Preprint + + + Presentation + Presentation + + + Recording, acoustical + Recording, acoustical + + + Recording, musical + Recording, musical + + + Recording, oral + Recording, oral + + + Software + Software + + + Technical Report + Technical Report + + + Thesis + Thesis + + + Video + Video + + + Working Paper + Working Paper + + + Other + Other + + - - - - N/A - - - - English (United States) - en_US - - - English - en - - - Spanish - es - - - German - de - - - French - fr - - - Italian - it - - - Japanese - ja - - - Chinese - zh - - - Turkish - tr - - - (Other) - other - - + + + + N/A + + + + English (United States) + en_US + + + English + en + + + Spanish + es + + + German + de + + + French + fr + + + Italian + it + + + Japanese + ja + + + Chinese + zh + + + Turkish + tr + + + (Other) + other + + Italia @@ -2625,9 +2589,9 @@ Botswana BW - - - + + + Use by owner Use by owner @@ -2823,7 +2787,7 @@ Unknown - + Male @@ -2839,6 +2803,6 @@ - + diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java index 9af95845d407..269de3aefb9d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java @@ -321,10 +321,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=16"), Matchers.containsString("size=1")))) + Matchers.containsString("page=18"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(17))) - .andExpect(jsonPath("$.page.totalPages", is(17))) + .andExpect(jsonPath("$.page.totalElements", is(19))) + 
.andExpect(jsonPath("$.page.totalPages", is(19))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -347,10 +347,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=16"), Matchers.containsString("size=1")))) + Matchers.containsString("page=18"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(17))) - .andExpect(jsonPath("$.page.totalPages", is(17))) + .andExpect(jsonPath("$.page.totalElements", is(19))) + .andExpect(jsonPath("$.page.totalPages", is(19))) .andExpect(jsonPath("$.page.number", is(1))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java index 27a7ccb0985d..d817b573ff2b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java @@ -73,7 +73,7 @@ public void findAll() throws Exception { .andExpect(content().contentType(contentType)) //The configuration file for the test env includes PAGE_TOTAL_ELEMENTS forms .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(34))) + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) .andExpect(jsonPath("$.page.totalPages", equalTo(2))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect( @@ -90,7 +90,7 @@ public void findAllWithNewlyCreatedAccountTest() throws Exception { .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(34))) + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) .andExpect(jsonPath("$.page.totalPages", equalTo(2))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect(jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL @@ -670,10 +670,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=1"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=16"), Matchers.containsString("size=2")))) + Matchers.containsString("page=19"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(34))) - .andExpect(jsonPath("$.page.totalPages", equalTo(17))) + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) + .andExpect(jsonPath("$.page.totalPages", equalTo(20))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") @@ -681,8 +681,8 @@ public void findAllPaginationTest() throws Exception { .param("page", "15")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("publication_indexing"))) - .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("qualdroptest"))) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("publication-dc-contributor-author"))) + 
.andExpect(jsonPath("$._embedded.submissionforms[1].id", is("publication-dc-contributor-editor"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=0"), Matchers.containsString("size=2")))) @@ -697,10 +697,10 @@ public void findAllPaginationTest() throws Exception { Matchers.containsString("page=16"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=16"), Matchers.containsString("size=2")))) + Matchers.containsString("page=19"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(34))) - .andExpect(jsonPath("$.page.totalPages", equalTo(17))) + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) + .andExpect(jsonPath("$.page.totalPages", equalTo(20))) .andExpect(jsonPath("$.page.number", is(15))); } @@ -744,10 +744,10 @@ public void visibilityTest() throws Exception { Matchers.containsString("page=4"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=16"), Matchers.containsString("size=2")))) + Matchers.containsString("page=19"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(34))) - .andExpect(jsonPath("$.page.totalPages", equalTo(17))) + .andExpect(jsonPath("$.page.totalElements", equalTo(40))) + .andExpect(jsonPath("$.page.totalPages", equalTo(20))) .andExpect(jsonPath("$.page.number", is(4))); } } From 2482b012cd4adbe65f036281fce38365260ac50f Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Mon, 8 Jan 2024 18:57:30 +0100 Subject: [PATCH 643/686] [DSC-1438] Update input form configuration according to input form xls file --- dspace/config/item-submission.xml | 11 +- dspace/config/submission-forms.xml | 955 ++++++++++++++++------------- 2 files changed, 528 insertions(+), 438 deletions(-) diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index 6af3ca6d5a83..35c9ffdc8de4 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -1,6 +1,5 @@ - @@ -172,8 +171,8 @@ cclicense - - submit.progressbar.accessCondition + + submit.progressbar.itemAccessConditions org.dspace.app.rest.submit.step.AccessConditionStep accessCondition @@ -192,12 +191,12 @@ submission - submit.progressbar.CustomUrlStep + submit.progressbar.custom-url org.dspace.app.rest.submit.step.CustomUrlStep custom-url - submit.progressbar.correction-step + submit.progressbar.correction org.dspace.app.rest.submit.step.CorrectionStep correction workflow @@ -212,6 +211,7 @@ + @@ -352,4 +352,3 @@ - diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index ac57d73923b5..86212e2f884c 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -1,6 +1,5 @@ -
    @@ -8,33 +7,33 @@ dc title - false onebox - + false You must enter a title for the file. + dc description - false textarea - + false + dc type - false dropdown - Personal picture, logo, main article, etc. + false + Personal picture, logo, main article, etc. @@ -55,11 +54,11 @@ dc contributor author - false onebox - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. + false You must enter at least the author. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. @@ -67,11 +66,11 @@ oairecerif author affiliation - false onebox - Enter the affiliation of the author as stated on the publication. + false + Enter the affiliation of the author as stated on the publication. @@ -81,11 +80,11 @@ dc contributor editor - false onebox - The editors of this publication. + false You must enter at least the author. + The editors of this publication. @@ -93,11 +92,11 @@ oairecerif editor affiliation - false onebox - Enter the affiliation of the editor as stated on the publication. + false + Enter the affiliation of the editor as stated on the publication. @@ -107,11 +106,11 @@ dc relation funding - false onebox - Enter the name of funding, if any, that has supported this publication + false You must enter at least the funding name. + Enter the name of funding, if any, that has supported this publication @@ -119,11 +118,11 @@ dc relation grantno - false onebox - If the funding is not found in the system please enter the funding identifier / grant no + false + If the funding is not found in the system please enter the funding identifier / grant no @@ -133,11 +132,11 @@ dc relation funding - false onebox - Enter the name of funding, if any, that has supported this product + false You must enter at least the funding name. + Enter the name of funding, if any, that has supported this product @@ -145,11 +144,37 @@ dc relation grantno - false onebox + false + If the funding is not found in the system please enter the funding identifier / grant no + + + +
    + + + dc + relation + publication + + onebox + false + You must enter at least the publication title / citation + + Enter the publication title or citation, if any, that uses this product + + + dc + relation + doi + + onebox + false + + If the publication is not found in the system please enter the DOI identifier @@ -159,11 +184,11 @@ dc relation funding - false onebox - Enter the name of funding, if any, that has supported this patent + false You must enter at least the funding name. + Enter the name of funding, if any, that has supported this patent @@ -171,11 +196,11 @@ dc relation grantno - false onebox - If the funding is not found in the system please enter the funding identifier / grant no + false + If the funding is not found in the system please enter the funding identifier / grant no @@ -233,45 +258,45 @@ oairecerif affiliation role - false - onebox - + false + + oairecerif person affiliation - false - onebox - + false You must enter at least the organisation of your affiliation. + + oairecerif affiliation startDate - false - date - + false + + oairecerif affiliation endDate - false - date - + false + + @@ -280,45 +305,45 @@ crisrp qualification - false - onebox - + false You must enter the organisation + + crisrp qualification role - false - onebox - + false You must enter the qualification title. + + crisrp qualification start - false - date - + false + + crisrp qualification end - false - date - + false + + @@ -328,44 +353,44 @@ crisrp education role - false - onebox - + false You must enter the degree/title + + crisrp education - false - onebox - + false You must enter the organisation + + crisrp education start - false - date - + false + + crisrp education end - false - date - + false + + @@ -375,11 +400,11 @@ dc contributor author - false - name - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. + onebox + false You must enter at least the author. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. @@ -387,11 +412,11 @@ oairecerif author affiliation - false onebox - Enter the affiliation of the author as stated on the publication. + false + Enter the affiliation of the author as stated on the publication. @@ -401,11 +426,11 @@ dc contributor author - false - name - Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. + onebox + false You must enter at least the inventor. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. @@ -413,11 +438,11 @@ oairecerif author affiliation - false onebox - Enter the affiliation of the author as stated on the publication. + false + Enter the affiliation of the author as stated on the publication. @@ -427,11 +452,11 @@ oairecerif identifier url - false onebox - + false You must enter at least the site url. + @@ -439,11 +464,11 @@ crisrp site title - false onebox - + false + @@ -453,55 +478,55 @@ dc identifier issn - false onebox - + false + dc title - false onebox - + false You must enter a main title for this item. + dc publisher - false onebox - + false + dc subject - true tag - + true + dc description - false textarea - + false + @@ -510,23 +535,23 @@ dc identifier - true qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. 
+ true + If the item has any identification numbers or codes associated with +it, please enter the types and the actual numbers or codes. dc title - false onebox - Enter the main title of the item. + false You must enter a main title for this item. + Enter the main title of the item. @@ -534,11 +559,11 @@ dc title alternative - true onebox - If the item has any alternative titles, please enter them here. + true + If the item has any alternative titles, please enter them here. @@ -546,13 +571,13 @@ dc date issued - false date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't - applicable. + false You must enter at least the year. + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. @@ -560,11 +585,11 @@ dc contributor author - true group - Enter the names of the authors of this item. + true + Enter the names of the authors of this item. @@ -572,22 +597,22 @@ dc contributor editor - true group - The editors of this publication. + true + The editors of this publication. dc type - false onebox - Select the type(s) of content of the item. + false You must select a publication type + Select the type(s) of content of the item. publication-coar-types @@ -598,22 +623,22 @@ dc language iso - false dropdown - Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. + false + Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. dc subject - true tag - Enter appropriate subject keywords or phrases. + true + Enter appropriate subject keywords or phrases. @@ -621,11 +646,11 @@ datacite subject fos - true onebox - + true + oecd @@ -634,11 +659,11 @@ dc description abstract - false textarea - Enter the abstract of the item. + false + Enter the abstract of the item. @@ -648,12 +673,12 @@ dc relation publication - false - publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 onebox - The publication where this publication is included. E.g. a book chapter lists here the book, a contribution to a conference lists here the conference proceeding. + false + The publication where this publication is included. E.g. a book chapter lists here the book, a contribution to a conference lists here the conference proceeding. 
+ publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 @@ -661,12 +686,12 @@ dc relation isbn - false - publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 onebox - The ISBN of the book/report if it was not found in the system + false + The ISBN of the book/report if it was not found in the system + publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 @@ -674,12 +699,12 @@ dc relation doi - false - publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 onebox - The DOI of the book/report if it was not found in the system + false + The DOI of the book/report if it was not found in the system + publication-coar-types:c_3248,publication-coar-types:c_5794,publication-coar-types:c_6670 @@ -687,11 +712,11 @@ dc relation journal - false onebox - The journal or Serie where this publication has been published + false + The journal or Serie where this publication has been published @@ -699,11 +724,11 @@ dc relation ispartofseries - true series - Enter the series and number assigned to this item by your community. + true + Enter the series and number assigned to this item by your community. @@ -711,11 +736,11 @@ dc relation issn - false onebox - The journal or Serie ISSN if it was not found in the system + false + The journal or Serie ISSN if it was not found in the system @@ -723,12 +748,12 @@ dc coverage publication - false - publication-coar-types:c_efa0,publication-coar-types:c_ba08 onebox - The publication object of the review + false + The publication object of the review + publication-coar-types:c_efa0,publication-coar-types:c_ba08 @@ -736,12 +761,12 @@ dc coverage isbn - false - publication-coar-types:c_efa0,publication-coar-types:c_ba08 onebox - The ISBN of the reviewed item if it was not found in the system + false + The ISBN of the reviewed item if it was not found in the system + publication-coar-types:c_efa0,publication-coar-types:c_ba08 @@ -749,12 +774,12 @@ dc coverage doi - false - publication-coar-types:c_efa0,publication-coar-types:c_ba08 onebox - The DOI of the reviewed item if it was not found in the system + false + The DOI of the reviewed item if it was not found in the system + publication-coar-types:c_efa0,publication-coar-types:c_ba08 @@ -762,11 +787,11 @@ dc description sponsorship - true onebox - Enter the name of any sponsors. + true + Enter the name of any sponsors. @@ -774,11 +799,11 @@ oaire citation volume - false onebox - If applicable, the volume of the publishing channel where this publication appeared + false + If applicable, the volume of the publishing channel where this publication appeared @@ -786,11 +811,11 @@ oaire citation issue - false onebox - If applicable, the issue of the publishing channel where this publication appeared + false + If applicable, the issue of the publishing channel where this publication appeared @@ -798,11 +823,11 @@ oaire citation startPage - false onebox - If applicable, the page where this publication starts + false + If applicable, the page where this publication starts @@ -810,11 +835,11 @@ oaire citation endPage - false onebox - If applicable, the page where this publication ends + false + If applicable, the page where this publication ends @@ -824,11 +849,11 @@ dc relation funding - true group - Acknowledge the funding received for this publication. + true + Acknowledge the funding received for this publication. 
@@ -836,11 +861,11 @@ dc relation project - true onebox - Enter the name of project, if any, that has produced this publication. It is NOT necessary to list the projects connected with an acknowledge funding. + true + Enter the name of project, if any, that has produced this publication. It is NOT necessary to list the projects connected with an acknowledge funding. @@ -848,11 +873,11 @@ dc relation conference - true onebox - Enter the name of the conference where the item has been presented, if any. + true + Enter the name of the conference where the item has been presented, if any. @@ -860,11 +885,11 @@ dc relation product - true onebox - Link the item to one or more existent dataset in the repository used or described by the publication or, put here the dataset citation + true + Link the item to one or more existent dataset in the repository used or described by the publication or, put here the dataset citation @@ -872,22 +897,22 @@ dc identifier citation - false onebox - Enter the standard citation for the previously issued instance of this item. + false + Enter the standard citation for the previously issued instance of this item. dc description - false textarea - Enter any other description or comments in this box. + false + Enter any other description or comments in this box. @@ -895,11 +920,11 @@ dc description sponsorship - true onebox - Enter the name of any sponsors. + true + Enter the name of any sponsors. @@ -908,23 +933,23 @@ dc identifier - true qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. + true + If the item has any identification numbers or codes associated with +it, please enter the types and the actual numbers or codes. dc title - false onebox - Enter the main title of the item. + false You must enter a main title for this item. + Enter the main title of the item. @@ -932,11 +957,11 @@ dc title alternative - true onebox - If the item has any alternative titles, please enter them here. + true + If the item has any alternative titles, please enter them here. @@ -944,11 +969,11 @@ dc date issued - false date - Please give the date of previous publication or public distribution. You can leave out the day and/or month if they aren't applicable. + false You must enter at least the year. + Please give the date of previous publication or public distribution. You can leave out the day and/or month if they aren't applicable. @@ -956,11 +981,11 @@ dc description version - false onebox - If applicable, the version of the product + false + If applicable, the version of the product @@ -968,22 +993,22 @@ dc contributor author - true group - Enter the names of the authors of this item. + true + Enter the names of the authors of this item. dc type - false onebox - Nothing to do here. Note for administrators, this metadata could be completely hide using template item + false + Nothing to do here. Note for administrators, this metadata could be completely hide using template item product-coar-types @@ -994,22 +1019,22 @@ dc language iso - false dropdown - Select, if applicable, the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. + false + Select, if applicable, the language of the main content of the item. If the language does not appear in the list, please select 'Other'. 
If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. dc subject - true tag - Enter appropriate subject keywords or phrases. + true + Enter appropriate subject keywords or phrases. @@ -1017,11 +1042,11 @@ datacite subject fos - true onebox - + true + oecd @@ -1030,11 +1055,22 @@ dc description abstract - false textarea + false + Enter the abstract of the item. + + + + + dc + description + + textarea + false + Enter any other relevant information about the dataset. @@ -1043,11 +1079,11 @@ dc publisher - true onebox - The publisher or publishers of this product + true + The publisher or publishers of this product @@ -1055,11 +1091,11 @@ dc relation ispartofseries - true series - Link to the research output of which this product is a part (e.g. a data set collection that contains it). + true + Link to the research output of which this product is a part (e.g. a data set collection that contains it). @@ -1067,11 +1103,11 @@ dc relation issn - false onebox - The journal or Serie ISSN if it was not found in the system + false + The journal or Serie ISSN if it was not found in the system @@ -1079,11 +1115,11 @@ dc relation funding - true group - Acknowledge the funding received for this product. + true + Acknowledge the funding received for this product. @@ -1091,11 +1127,11 @@ dc relation project - true onebox - Enter the name of project, if any, that has produced this product. It is NOT necessary to list the projects connected with an acknowledge funding. + true + Enter the name of project, if any, that has produced this product. It is NOT necessary to list the projects connected with an acknowledge funding. @@ -1103,11 +1139,11 @@ dc relation conference - false onebox - The event where this product was presented or that is recorded in the product. + false + The event where this product was presented or that is recorded in the product. @@ -1115,11 +1151,11 @@ dc relation equipment - true onebox - The equipment that generated this product + true + The equipment that generated this product @@ -1127,11 +1163,11 @@ dc relation references - true onebox - Result outputs that are referenced by this product + true + Result outputs that are referenced by this product @@ -1139,11 +1175,11 @@ dc relation publication - true - - onebox - Result outputs that use this product + + inline-group + true + Result outputs that use this product @@ -1153,11 +1189,11 @@ dc identifier patentno - false onebox - The patent number + false + The patent number @@ -1174,11 +1210,11 @@ dc identifier applicationnumber - false onebox - The application number + false + The application number dc @@ -1186,30 +1222,31 @@ issued date - Date on which the application was physically received at the Patent Authority. Also named Filling Date + false + Date on which the application was physically received at the Patent Authority. Also named Filling Date. dc title - false onebox - The title of the patent + false You must specify a title for the patent + The title of the patent dcterms dateAccepted - false date - Date on which the application has been granted by the Patent Office. + false + Date on which the application has been granted by the Patent Office. 
@@ -1218,8 +1255,9 @@ dateSubmitted date - Date of making available to the public by printing or similar process of a patent document on which grant has taken place on or before the said date + false + Date of making available to the public by printing or similar process of a patent document on which grant has taken place on or before the said date. @@ -1227,44 +1265,44 @@ dc contributor author - true group - The inventor: The actual devisor of an invention that is the subject of a patent. + true + The inventor: The actual devisor of an invention that is the subject of a patent. dcterms rightsHolder - true onebox - The holders of this patent + true + The holders of this patent dc publisher - true onebox - The issuer of the patent: the patent office + true + The issuer of the patent: the patent office dc type - false onebox - Select the type of content of the patent. + false You must select a patent type + Select the type of content of the patent. patent-coar-types @@ -1287,22 +1325,35 @@ dc language iso - false dropdown - Select the country and its language. + false + Select the country and its language. dc subject - true onebox + true + Enter appropriate subject keywords or phrases. + + + + + datacite + subject + fos + + onebox + true + + oecd @@ -1310,11 +1361,11 @@ dc description abstract - false textarea - Enter the description of the patent. + false + Enter the description of the patent. @@ -1324,11 +1375,11 @@ dc relation funding - true group - Acknowledge the funding received for this patent. + true + Acknowledge the funding received for this patent. @@ -1336,11 +1387,11 @@ dc relation project - true onebox - Enter the name of project, if any, that has produced this patent. It is NOT necessary to list the projects connected with an acknowledge funding. + true + Enter the name of project, if any, that has produced this patent. It is NOT necessary to list the projects connected with an acknowledge funding. @@ -1348,11 +1399,11 @@ dc relation patent - true onebox - Patents that precede (i.e., have priority over) this patent + true + Patents that precede (i.e., have priority over) this patent @@ -1360,11 +1411,11 @@ dc relation references - true onebox - Result outputs that are referenced by this patent + true + Result outputs that are referenced by this patent @@ -1373,22 +1424,22 @@ dc title - false name - + false You must enter least at the Surname. 
+ crisrp name - false name - + false + @@ -1396,11 +1447,11 @@ crisrp name translated - false name - + false + @@ -1408,84 +1459,84 @@ crisrp name variant - true name - + true + person givenName - false onebox - + false + person familyName - false onebox - + false + person birthDate - false date - + false + oairecerif person gender - false dropdown - + false + person jobTitle - false onebox - + false + person affiliation name - false onebox - + false + crisrp workgroup - true onebox - + true + @@ -1493,33 +1544,33 @@ oairecerif identifier url - true group - + true + person email - false onebox - + false + dc subject - true tag - + true + @@ -1527,11 +1578,11 @@ datacite subject fos - true onebox - + true + oecd @@ -1540,11 +1591,11 @@ person identifier orcid - false onebox - Settable by connecting the entity with ORCID + false + Settable by connecting the entity with ORCID all @@ -1553,11 +1604,11 @@ person identifier scopus-author-id - true onebox - + true + @@ -1565,11 +1616,11 @@ person identifier rid - true onebox - + true + @@ -1577,11 +1628,11 @@ oairecerif person affiliation - true inline-group - + true + @@ -1589,55 +1640,55 @@ dc description abstract - false textarea - + false + crisrp education - true inline-group - + true + crisrp country - false dropdown - + false + crisrp qualification - true inline-group - + true + person knowsLanguage - true dropdown - + true + @@ -1645,11 +1696,11 @@ cris policy eperson - false onebox - + false + @@ -1657,11 +1708,11 @@ cris policy group - false onebox - + false + @@ -1670,77 +1721,77 @@ dc title - false onebox - + false You must enter the oganization name. + oairecerif acronym - false onebox - + false + organization parentOrganization - false onebox - + false + crisou director - false onebox - + false + organization foundingDate - false date - + false + crisou boards - true onebox - + true + organization identifier - true qualdrop_value - + true + @@ -1748,11 +1799,11 @@ oairecerif identifier url - true onebox - + true + @@ -1760,11 +1811,11 @@ dc description abstract - false textarea - + false + @@ -1772,32 +1823,32 @@ organization address addressLocality - false onebox - + false + organization address addressCountry - false dropdown - + false + dc type - false dropdown - + false You must specify the organisation type + @@ -1806,88 +1857,88 @@ dc title - false onebox - + false You must enter the project name. + oairecerif acronym - false onebox - + false + crispj coordinator - true onebox - + true + oairecerif internalid - false onebox - + false + crispj partnerou - true onebox - + true + crispj investigator - false onebox - + false You must enter the project coordinator. 
+ crispj openaireid - false onebox - + false + crispj organization - true onebox - + true + @@ -1895,32 +1946,32 @@ oairecerif identifier url - true onebox - + true + oairecerif oamandate - false dropdown - + false + oairecerif oamandate url - false onebox - + false + @@ -1928,21 +1979,21 @@ oairecerif project startDate - false date - + false + oairecerif project endDate - false date - + false + @@ -1950,22 +2001,22 @@ oairecerif project status - false onebox - + false + dc type - false dropdown - + false + @@ -1973,33 +2024,33 @@ dc description abstract - false textarea - + false + crispj coinvestigators - true onebox - + true + dc subject - true tag - + true + @@ -2007,11 +2058,11 @@ datacite subject fos - true onebox - + true + oecd @@ -2020,11 +2071,11 @@ dc relation equipment - true onebox - + true + @@ -2033,31 +2084,31 @@ dc title - false onebox - + false You must enter the equipment name. + oairecerif acronym - false onebox - + false + oairecerif internalid - false onebox - + false + @@ -2065,31 +2116,31 @@ dc relation project - false onebox - + false + oairecerif funder - false onebox - + false + oairecerif fundingParent - false onebox - Link this funding with its upper level if applicable + false + Link this funding with its upper level if applicable @@ -2097,53 +2148,65 @@ crisfund award url - false onebox - + false The url preferably on the funder website of the award notice + + + + + + crisfund + award + uri + + onebox + false + The Award URI + oairecerif oamandate - false dropdown - + false + oairecerif oamandate url - false onebox - + false + oairecerif amount - false onebox - + false + oairecerif amount currency - false dropdown - + false + @@ -2151,97 +2214,97 @@ oairecerif funding identifier - false onebox - + false + oairecerif funding startDate - false date - + false + oairecerif funding endDate - false date - + false + dc type - false dropdown - + false + dc description - false textarea - + false + crisfund investigators - true onebox - + true + crisfund coinvestigators - true onebox - + true + crisfund leadorganizations - true onebox - + true + crisfund leadcoorganizations - true onebox - + true + @@ -2250,66 +2313,66 @@ dc title - false onebox - + false You must enter the equipment name. 
+ oairecerif acronym - false onebox - + false + oairecerif internalid - false onebox - + false + crisequipment ownerou - false onebox - + false + crisequipment ownerrp - false onebox - + false + dc description - false textarea - + false + @@ -2318,33 +2381,33 @@ dc title - false onebox - + false + oairecerif acronym - false onebox - + false + dc type - false dropdown - + false + @@ -2352,21 +2415,21 @@ oairecerif event startDate - false date - + false + oairecerif event endDate - false date - + false + @@ -2374,11 +2437,11 @@ oairecerif event place - false onebox - + false + @@ -2386,77 +2449,77 @@ oairecerif event country - false dropdown - + false + crisevent organizerou - true onebox - + true + crisevent organizerpj - true onebox - + true + crisevent sponsorou - true onebox - + true + crisevent sponsorpj - true onebox - + true + crisevent partnerou - true onebox - + true + crisevent partnerpj - true onebox - + true + @@ -2464,22 +2527,22 @@ dc description abstract - false textarea - + false + dc subject - true tag - + true + @@ -2488,11 +2551,11 @@ cris owner - false onebox - + false + @@ -2653,6 +2716,10 @@ + + N/A + + English (United States) en_US @@ -3051,6 +3118,10 @@ + + Unspecified + + Academic Institute Academic Institute @@ -3105,6 +3176,10 @@ + + Unspecified + + basic research basic research @@ -3119,6 +3194,10 @@ + + Unspecified + + Gift Gift @@ -3141,6 +3220,10 @@ + + Unspecified + + Conference Conference @@ -3151,6 +3234,10 @@ + + Unspecified + + Afghanistan AF @@ -4149,6 +4236,10 @@ + + Unspecified + + Logo logo From 57841a22a1c51fae4e4f0931cdcaa86e6a61880e Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Tue, 9 Jan 2024 09:28:55 +0100 Subject: [PATCH 644/686] [DSC-1438] Remove authority.cfg from test folder --- .../dspaceFolder/config/modules/authority.cfg | 285 ------------------ dspace/config/modules/authority.cfg | 3 - 2 files changed, 288 deletions(-) delete mode 100644 dspace-api/src/test/data/dspaceFolder/config/modules/authority.cfg diff --git a/dspace-api/src/test/data/dspaceFolder/config/modules/authority.cfg b/dspace-api/src/test/data/dspaceFolder/config/modules/authority.cfg deleted file mode 100644 index 10e4bc36fbb9..000000000000 --- a/dspace-api/src/test/data/dspaceFolder/config/modules/authority.cfg +++ /dev/null @@ -1,285 +0,0 @@ -#---------------------------------------------------------------# -#----------------- AUTHORITY CONFIGURATIONS --------------------# -#---------------------------------------------------------------# -# These configs are used by the authority framework # -#---------------------------------------------------------------# - -## The DCInputAuthority plugin is automatically configured with every -## value-pairs element in input-forms.xml, namely: -## common_identifiers, common_types, common_iso_languages -## -## The DSpaceControlledVocabulary plugin is automatically configured -## with every *.xml file in [dspace]/config/controlled-vocabularies, -## and creates a plugin instance for each, using base filename as the name. -## eg: nsi, srsc. 
-## Each DSpaceControlledVocabulary plugin comes with three configuration options: -# vocabulary.plugin._plugin_.hierarchy.store = # default: true -# vocabulary.plugin._plugin_.hierarchy.suggest = # default: false -# vocabulary.plugin._plugin_.delimiter = "" # default: "::" -## -## An example using "srsc" can be found later in this section - -plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \ - org.dspace.content.authority.DCInputAuthority, \ - org.dspace.content.authority.DSpaceControlledVocabulary - - ## -## This sets the default lowest confidence level at which a metadata value is included -## in an authority-controlled browse (and search) index. It is a symbolic -## keyword, one of the following values (listed in descending order): -## accepted -## uncertain -## ambiguous -## notfound -## failed -## rejected -## novalue -## unset -## See manual or org.dspace.content.authority.Choices source for descriptions. -authority.minconfidence = ambiguous - -# Configuration settings for ORCID based authority control. -# Uncomment the lines below to enable configuration -#choices.plugin.dc.contributor.author = SolrAuthorAuthority -#choices.presentation.dc.contributor.author = authorLookup -#authority.controlled.dc.contributor.author = true -#authority.author.indexer.field.1=dc.contributor.author - -## -## This sets the lowest confidence level at which a metadata value is included -## in an authority-controlled browse (and search) index. It is a symbolic -## keyword from the same set as for the default "authority.minconfidence" -#authority.minconfidence.dc.contributor.author = accepted - -## demo: subject code autocomplete, using srsc as authority -## (DSpaceControlledVocabulary plugin must be enabled) -## Warning: when enabling this feature any controlled vocabulary configuration in the input-forms.xml for the metadata field will be overridden. 
-#vocabulary.plugin.srsc.hierarchy.store = true -#vocabulary.plugin.srsc.hierarchy.suggest = true -#vocabulary.plugin.srsc.delimiter = "::" - -# publisher name lookup through SHERPA/RoMEO: -#choices.plugin.dc.publisher = SRPublisher -#choices.presentation.dc.publisher = suggest - -## demo: journal title lookup, with ISSN as authority -#choices.plugin.dc.title.alternative = SRJournalTitle -#choices.presentation.dc.title.alternative = suggest -#authority.controlled.dc.title.alternative = true - -##### Authority Control Settings ##### -#plugin.named.org.dspace.content.authority.ChoiceAuthority = \ -# org.dspace.content.authority.SampleAuthority = Sample, \ -# org.dspace.content.authority.SHERPARoMEOPublisher = SRPublisher, \ -# org.dspace.content.authority.SHERPARoMEOJournalTitle = SRJournalTitle, \ -# org.dspace.content.authority.SolrAuthority = SolrAuthorAuthority - -#Uncomment to enable ORCID authority control -#plugin.named.org.dspace.content.authority.ChoiceAuthority = \ -# org.dspace.content.authority.SolrAuthority = SolrAuthorAuthority - -##### Authority Control Settings ##### -plugin.named.org.dspace.content.authority.ChoiceAuthority = \ - org.dspace.content.authority.OrcidAuthority = AuthorAuthority,\ - org.dspace.content.authority.OrcidAuthority = EditorAuthority,\ - org.dspace.content.authority.OrcidAuthority = AuthorStrictMatchAuthority, \ - org.dspace.content.authority.OrcidAuthority = AuthorCoarseMatchAuthority, \ - org.dspace.content.authority.ItemAuthority = DataSetAuthority,\ - org.dspace.content.authority.ItemAuthority = JournalAuthority,\ - org.dspace.content.authority.RorOrgUnitAuthority = OrgUnitAuthority,\ - org.dspace.content.authority.ItemAuthority = ProjectAuthority,\ - org.dspace.content.authority.OpenAIREProjectAuthority = FundingAuthority,\ - org.dspace.content.authority.ItemAuthority = PublicationAuthority,\ - org.dspace.content.authority.ItemAuthority = EquipmentAuthority,\ - org.dspace.content.authority.EPersonAuthority = EPersonAuthority,\ - org.dspace.content.authority.GroupAuthority = GroupAuthority,\ - org.dspace.content.authority.ZDBAuthority = ZDBAuthority,\ - org.dspace.content.authority.SherpaAuthority = SherpaAuthority - -cris.ItemAuthority.forceInternalName = true - -# AuthorStrictMatchAuthority configuration -cris.ItemAuthority.AuthorStrictMatchAuthority.forceInternalName = false - -# AuthorCoarseMatchAuthority configuration -cris.ItemAuthority.AuthorCoarseMatchAuthority.forceInternalName = false - -cris.ItemAuthority.DataSetAuthority.entityType = Product -cris.ItemAuthority.JournalAuthority.entityType = Journal -cris.ItemAuthority.OrgUnitAuthority.entityType = OrgUnit -cris.ItemAuthority.ProjectAuthority.entityType = Project -cris.ItemAuthority.FundingAuthority.entityType = Funding -cris.ItemAuthority.PublicationAuthority.entityType = Publication -cris.ItemAuthority.EquipmentAuthority.entityType = Equipment - -## OrcidAuthority Extras configuration -# - -cris.OrcidAuthority.EditorAuthority.institution.key = oairecerif_editor_affiliation -cris.OrcidAuthority.AuthorAuthority.institution.key = oairecerif_author_affiliation - -#cris.OrcidAuthority.AuthorAuthority.institution.display = true -#cris.OrcidAuthority.AuthorAuthority.institution.as-data = true -# -#cris.OrcidAuthority.AuthorAuthority.orcid-id.key = person_identifier_orcid -#cris.OrcidAuthority.AuthorAuthority.orcid-id.display = true -#cris.OrcidAuthority.AuthorAuthority.orcid-id.as-data = true - - -cris.SherpaAuthority.entityType = Journal -cris.SherpaAuthority.local-item-choices-enabled = true 
- -ItemAuthority.reciprocalMetadata.Publication.dc.relation.product = dc.relation.publication -ItemAuthority.reciprocalMetadata.Product.dc.relation.publication = dc.relation.product - -choices.plugin.dc.contributor.author = AuthorAuthority -choices.presentation.dc.contributor.author = suggest -authority.controlled.dc.contributor.author = true - -choices.plugin.green.override.dc.contributor.author = AuthorStrictMatchAuthority -choices.plugin.orange.override.dc.contributor.author = AuthorCoarseMatchAuthority - -choices.plugin.oairecerif.author.affiliation = OrgUnitAuthority -choices.presentation.oairecerif.author.affiliation = suggest -authority.controlled.oairecerif.author.affiliation = true - -choices.plugin.dc.contributor.editor = EditorAuthority -choices.presentation.dc.contributor.editor = suggest -authority.controlled.dc.contributor.editor = true - -choices.plugin.oairecerif.editor.affiliation = OrgUnitAuthority -choices.presentation.oairecerif.editor.affiliation = suggest -authority.controlled.oairecerif.editor.affiliation = true - -choices.plugin.dc.relation.product = DataSetAuthority -choices.presentation.dc.relation.product = suggest -authority.controlled.dc.relation.product = true - -choices.plugin.dc.relation.publication = PublicationAuthority -choices.presentation.dc.relation.publication = suggest -authority.controlled.dc.relation.publication = true - -choices.plugin.dc.relation.journal = SherpaAuthority -choices.presentation.dc.relation.journal = suggest -authority.controlled.dc.relation.journal = true - -choices.plugin.dc.relation.project = ProjectAuthority -choices.presentation.dc.relation.project = suggest -authority.controlled.dc.relation.project = true - -choices.plugin.dc.relation.funding = FundingAuthority -choices.presentation.dc.relation.funding = suggest -# the externalsource provider identifier -choises.externalsource.dc.relation.funding = fundingAuthority -authority.controlled.dc.relation.funding = true - -choices.plugin.dc.description.sponsorship = OrgUnitAuthority -choices.presentation.dc.description.sponsorship = suggest -authority.controlled.dc.description.sponsorship = true - -### authorities required by Projects -choices.plugin.crispj.coordinator = OrgUnitAuthority -choices.presentation.crispj.coordinator = suggest -authority.controlled.crispj.coordinator = true - -choices.plugin.crispj.organization = OrgUnitAuthority -choices.presentation.crispj.organization = suggest -authority.controlled.crispj.organization = true - -choices.plugin.crispj.partnerou = OrgUnitAuthority -choices.presentation.crispj.partnerou = suggest -authority.controlled.crispj.partnerou = true - -choices.plugin.crispj.investigator = AuthorAuthority -choices.presentation.crispj.investigator = suggest -authority.controlled.crispj.investigator = true - -choices.plugin.crispj.coinvestigators = AuthorAuthority -choices.presentation.crispj.coinvestigators = suggest -authority.controlled.crispj.coinvestigators = true - -choices.plugin.dc.relation.equipment = EquipmentAuthority -choices.presentation.dc.relation.equipment = suggest -authority.controlled.dc.relation.equipment = true - -### authorities required by Person -choices.plugin.person.affiliation.name = OrgUnitAuthority -choices.presentation.person.affiliation.name = suggest -authority.controlled.person.affiliation.name = true - -choices.plugin.oairecerif.person.affiliation = OrgUnitAuthority -choices.presentation.oairecerif.person.affiliation = suggest -authority.controlled.oairecerif.person.affiliation = true - 
-#choices.plugin.crisrp.workgroup = OrgUnitAuthority -#choices.presentation.crisrp.workgroup = suggest -#authority.controlled.crisrp.workgroup = true - -#choices.plugin.crisrp.qualification = OrgUnitAuthority -#choices.presentation.crisrp.qualification = suggest -#authority.controlled.crisrp.qualification = true - -#choices.plugin.crisrp.education = OrgUnitAuthority -#choices.presentation.crisrp.education = suggest -#authority.controlled.crisrp.education = true - -### authorities required by OrgUnit -choices.plugin.organization.parentOrganization = OrgUnitAuthority -choices.presentation.organization.parentOrganization = suggest -authority.controlled.organization.parentOrganization = true - -choices.plugin.crisou.director= AuthorAuthority -choices.presentation.crisou.director = suggest -authority.controlled.crisou.director = true - -### authorities required by Funding -choices.plugin.oairecerif.funder = OrgUnitAuthority -choices.presentation.oairecerif.funder = suggest -authority.controlled.oairecerif.funder = true - -choices.plugin.oairecerif.fundingParent = FundingAuthority -choices.presentation.oairecerif.fundingParent = suggest -authority.controlled.oairecerif.fundingParent = true - -choices.plugin.crisfund.investigators = AuthorAuthority -choices.presentation.crisfund.investigators = suggest -authority.controlled.crisfund.investigators = true - -choices.plugin.crisfund.coinvestigators = AuthorAuthority -choices.presentation.crisfund.coinvestigators = suggest -authority.controlled.crisfund.coinvestigators = true - -choices.plugin.crisfund.leadorganizations = OrgUnitAuthority -choices.presentation.crisfund.leadorganizations = suggest -authority.controlled.crisfund.leadorganizations = true - -choices.plugin.crisfund.leadcoorganizations = OrgUnitAuthority -choices.presentation.crisfund.leadcoorganizations = suggest -authority.controlled.crisfund.leadcoorganizations = true - -### authorities required by cris features -choices.plugin.cris.policy.eperson = EPersonAuthority -choices.presentation.cris.policy.eperson = suggest -authority.controlled.cris.policy.eperson = true - -choices.plugin.cris.policy.group = GroupAuthority -choices.presentation.cris.policy.group = suggest -authority.controlled.cris.policy.group = true - -choices.plugin.dspace.object.owner = EPersonAuthority -choices.presentation.dspace.object.owner = suggest -authority.controlled.dspace.object.owner = true - -choices.plugin.dc.identifier.issn = ZDBAuthority -choices.presentation.dc.identifier.issn = suggest -authority.controlled.dc.identifier.issn = true - -choices.plugin.dc.relation.ispartof = SherpaAuthority -choices.presentation.dc.relation.ispartof = suggest -authority.controlled.dc.relation.ispartof = true - -authority.controlled.dc.type = true -choices.plugin.dc.type = ControlledVocabularyAuthority - -# DSpace-CRIS stores by default the authority of controlled vocabularies -vocabulary.plugin.authority.store = true diff --git a/dspace/config/modules/authority.cfg b/dspace/config/modules/authority.cfg index 99ccdbc025bb..037cf01e5d79 100644 --- a/dspace/config/modules/authority.cfg +++ b/dspace/config/modules/authority.cfg @@ -136,9 +136,6 @@ choices.plugin.dc.contributor.author = AuthorAuthority choices.presentation.dc.contributor.author = suggest authority.controlled.dc.contributor.author = true -#choices.plugin.green.override.dc.contributor.author = AuthorStrictMatchAuthority -#choices.plugin.orange.override.dc.contributor.author = AuthorCoarseMatchAuthority - choices.plugin.oairecerif.author.affiliation = 
OrgUnitAuthority choices.presentation.oairecerif.author.affiliation = suggest authority.controlled.oairecerif.author.affiliation = true From 4b62f7980d53b01030c02bdae7e22de28eb4d9bf Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Tue, 9 Jan 2024 13:43:09 +0100 Subject: [PATCH 645/686] [DSC-1438] Remove duplicated metadata --- dspace/config/submission-forms.xml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index 86212e2f884c..45137c1ab15d 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -782,18 +782,6 @@ it, please enter the types and the actual numbers or codes. publication-coar-types:c_efa0,publication-coar-types:c_ba08 - - - dc - description - sponsorship - - onebox - true - - Enter the name of any sponsors. - - oaire From db42cf641c4b04d48450ffcf1a7f5093503d4b74 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Tue, 9 Jan 2024 14:34:03 +0100 Subject: [PATCH 646/686] [DSC-1438] Revert indentation changes --- .../dspaceFolder/config/item-submission.xml | 561 +++++++++--------- 1 file changed, 280 insertions(+), 281 deletions(-) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml index 8956710a3e3a..78d4eb9171c4 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/item-submission.xml @@ -7,286 +7,285 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - org.dspace.app.rest.submit.step.CollectionStep - collection - submission - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.steptwo - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - - submit.progressbar.upload - org.dspace.app.rest.submit.step.UploadStep - upload - - - submit.progressbar.license - org.dspace.app.rest.submit.step.LicenseStep - license - submission - - - - - - - - - submit.progressbar.CClicense - org.dspace.app.rest.submit.step.CCLicenseStep - cclicense - - - - - - - - - - - - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - submit.progressbar.describe.stepone - org.dspace.app.rest.submit.step.DescribeStep - submission-form - - - - - submit.progressbar.identifiers - org.dspace.app.rest.submit.step.ShowIdentifiersStep - identifiers - - 
- - - Sample - org.dspace.submit.step.SampleStep - sample - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + org.dspace.app.rest.submit.step.CollectionStep + collection + submission + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.steptwo + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + + submit.progressbar.upload + org.dspace.app.rest.submit.step.UploadStep + upload + + + submit.progressbar.license + org.dspace.app.rest.submit.step.LicenseStep + license + submission + + + + + + + + submit.progressbar.CClicense + org.dspace.app.rest.submit.step.CCLicenseStep + cclicense + + + + + + + + + + + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + + + submit.progressbar.identifiers + org.dspace.app.rest.submit.step.ShowIdentifiersStep + identifiers + + + + + Sample + org.dspace.submit.step.SampleStep + sample + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 975e0d4de824239b52791cefe1c6108c8f310e0f Mon Sep 17 00:00:00 2001 From: "aliaksei.bykau" Date: Fri, 22 Dec 2023 08:18:43 +0100 Subject: [PATCH 647/686] [DSC-1435] improve for email template for subscriptions_content and email about subscribed statistics update. 
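
The content digest now wraps the name of the subscribed Community/Collection in quotes, and the statistics mail gets an explicit subject plus a sentence pointing the reader to the attached spreadsheet. Below is a minimal sketch of how such a notification is assembled with the org.dspace.core.Email helper used in the diff that follows; it assumes a configured DSpace installation so that Email.getEmail can resolve the template, and the recipient address, locale and attachment path are illustrative assumptions, not part of this change:

    import java.io.File;
    import java.util.Locale;

    import org.dspace.core.Email;
    import org.dspace.core.I18nUtil;

    public class SubscriptionMailSketch {
        public static void main(String[] args) throws Exception {
            // Load the Velocity template shipped as config/emails/subscriptions_content
            Email email = Email.getEmail(
                    I18nUtil.getEmailFilename(Locale.getDefault(), "subscriptions_content"));
            email.addRecipient("subscriber@example.com");             // assumed address
            email.setSubject("DSpace: Statistics of records to which you are subscribed");
            email.setContent("intro", "This automatic email is sent by DSpace based on the "
                    + "subscribed statistics updates.\n\nSee additional details in the file attached.");
            email.addAttachment(new File("/tmp/subscriptions.xlsx"),  // assumed path
                    "subscriptions.xlsx");
            email.send();
        }
    }
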
--- .../main/java/org/dspace/subscriptions/ContentGenerator.java | 2 +- .../java/org/dspace/subscriptions/StatisticsGenerator.java | 4 +++- dspace/config/emails/subscriptions_content | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java index 80e329ff63f4..65f1ae9dcf9b 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java @@ -85,7 +85,7 @@ private String generateBodyMail(String type, List subscription for (SubscriptionItem item : subscriptionItems) { out.write("\n".getBytes(UTF_8)); out.write("List of new content for the\n".getBytes(UTF_8)); - out.write((type + " " + item.getName() + " - " + item.getUrl() + "\n") + out.write((type + " \"" + item.getName() + "\" - " + item.getUrl() + "\n") .getBytes(UTF_8)); for (Entry entry : item.getItemUrlsByItemName().entrySet()) { diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java index 842ff9aa0e8f..43ff6b71d4b5 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/StatisticsGenerator.java @@ -51,8 +51,10 @@ public void notifyForSubscriptions(Context c, EPerson ePerson, List String name = configurationService.getProperty("dspace.name"); File attachment = generateExcel(crisMetricsList, c); email.addAttachment(attachment, "subscriptions.xlsx"); + email.setSubject(name + ": Statistics of records which you are subscribed"); email.setContent("intro", - "This automatic email is sent by " + name + " based on the subscribed statistics updates."); + "This automatic email is sent by " + name + " based on the subscribed statistics updates.\n\n" + + "See additional details in the file attached."); email.send(); } } catch (Exception ex) { diff --git a/dspace/config/emails/subscriptions_content b/dspace/config/emails/subscriptions_content index e76c8c311a4f..9d2d255c06d3 100644 --- a/dspace/config/emails/subscriptions_content +++ b/dspace/config/emails/subscriptions_content @@ -4,6 +4,7 @@ ## {1} Collections updates block ## {2} Communities updates block ## {3} Entity updates block +#set($subject = "${config.get('dspace.name')}: Statistics of updates on subscribed items") This email is sent from ${config.get('dspace.name')} based on the chosen subscription preferences. 
You can manage your subscription preferences from ${params[0]} From d50aa9d87fa0f79e1a7768624a89c9283ea83811 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Thu, 11 Jan 2024 22:06:25 +0100 Subject: [PATCH 648/686] DSC-1455 simplify the uuid iterator implementation retrieving just the uuid from the database initial query --- .../mediafilter/MediaFilterServiceImpl.java | 15 +-- .../dao/impl/RequestItemDAOImpl.java | 2 +- .../content/dao/impl/BitstreamDAOImpl.java | 35 +++--- .../dspace/content/dao/impl/ItemDAOImpl.java | 105 +++++++++++------- .../dao/impl/MetadataValueDAOImpl.java | 4 +- .../org/dspace/core/AbstractHibernateDAO.java | 15 ++- .../java/org/dspace/core/UUIDIterator.java | 76 ++----------- .../main/java/org/dspace/curate/Curator.java | 23 +--- 8 files changed, 126 insertions(+), 149 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java index 961cbaa7d0ee..3f898bd7d9f1 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java @@ -37,7 +37,6 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.SelfNamedPlugin; -import org.dspace.core.UUIDIterator; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -135,17 +134,13 @@ public void applyFiltersCommunity(Context context, Community community) throws Exception { //only apply filters if community not in skip-list if (!inSkipList(community.getHandle())) { List subcommunities = community.getSubcommunities(); - List collections = community.getCollections(); - - UUIDIterator communityIterator = new UUIDIterator<>(context, subcommunities, Community.class); - UUIDIterator collectionIterator = new UUIDIterator<>(context, collections, Collection.class); - - while (communityIterator.hasNext()) { - applyFiltersCommunity(context, communityIterator.next()); + for (Community subcommunity : subcommunities) { + applyFiltersCommunity(context, subcommunity); } - while (collectionIterator.hasNext()) { - applyFiltersCollection(context, collectionIterator.next()); + List collections = community.getCollections(); + for (Collection collection : collections) { + applyFiltersCollection(context, collection); } } } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java index a09a2bf250e5..008174ded88c 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java @@ -46,6 +46,6 @@ public RequestItem findByToken(Context context, String token) throws SQLExceptio public Iterator findByItem(Context context, Item item) throws SQLException { Query query = createQuery(context, "FROM RequestItem WHERE item_id= :uuid"); query.setParameter("uuid", item.getID()); - return iterate(context, query, RequestItem.class); + return iterate(query); } } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java index a4f61f0c6435..a3347a40ab93 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java @@ -27,6 +27,7 @@ import org.dspace.core.AbstractHibernateDSODAO; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.core.UUIDIterator; /** * Hibernate implementation of the Database Access Object interface class for the Bitstream object. @@ -77,7 +78,7 @@ public List findBitstreamsWithNoRecentChecksum(Context context) throw @Override public Iterator findByCommunity(Context context, Community community) throws SQLException { - Query query = createQuery(context, "select b from Bitstream b " + + Query query = createQuery(context, "select b.id from Bitstream b " + "join b.bundles bitBundles " + "join bitBundles.items item " + "join item.collections itemColl " + @@ -85,40 +86,43 @@ public Iterator findByCommunity(Context context, Community community) "WHERE :community IN community"); query.setParameter("community", community); - - return iterate(context, query, Bitstream.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Bitstream.class, this); } @Override public Iterator findByCollection(Context context, Collection collection) throws SQLException { - Query query = createQuery(context, "select b from Bitstream b " + + Query query = createQuery(context, "select b.id from Bitstream b " + "join b.bundles bitBundles " + "join bitBundles.items item " + "join item.collections c " + "WHERE :collection IN c"); query.setParameter("collection", collection); - - return iterate(context, query, Bitstream.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Bitstream.class, this); } @Override public Iterator findByItem(Context context, Item item) throws SQLException { - Query query = createQuery(context, "select b from Bitstream b " + + Query query = createQuery(context, "select b.id from Bitstream b " + "join b.bundles bitBundles " + "join bitBundles.items item " + "WHERE :item IN item"); query.setParameter("item", item); - - return iterate(context, query, Bitstream.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Bitstream.class, this); } @Override public Iterator findShowableByItem(Context context, UUID itemId, String bundleName) throws SQLException { Query query = createQuery( context, - "select b from Bitstream b " + + "select b.id from Bitstream b " + "join b.bundles bitBundle " + "join bitBundle.items item " + "WHERE item.id = :itemId " + @@ -150,15 +154,18 @@ public Iterator findShowableByItem(Context context, UUID itemId, Stri query.setParameter("itemId", itemId); query.setParameter("bundleName", bundleName); - - return iterate(context, query, Bitstream.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Bitstream.class, this); } @Override public Iterator findByStoreNumber(Context context, Integer storeNumber) throws SQLException { - Query query = createQuery(context, "select b from Bitstream b where b.storeNumber = :storeNumber"); + Query query = createQuery(context, "select b.id from Bitstream b where b.storeNumber = :storeNumber"); query.setParameter("storeNumber", storeNumber); - return iterate(context, query, Bitstream.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Bitstream.class, this); } @Override diff --git 
a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java index 443268cbbb7a..3b12c68dcedd 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java @@ -29,6 +29,7 @@ import org.dspace.content.dao.ItemDAO; import org.dspace.core.AbstractHibernateDSODAO; import org.dspace.core.Context; +import org.dspace.core.UUIDIterator; import org.dspace.eperson.EPerson; import org.hibernate.Criteria; import org.hibernate.criterion.Criterion; @@ -56,28 +57,34 @@ protected ItemDAOImpl() { @Override public Iterator findAll(Context context, boolean archived) throws SQLException { - Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id"); + Query query = createQuery(context, "SELECT i.id FROM Item i WHERE inArchive=:in_archive ORDER BY id"); query.setParameter("in_archive", archived); - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findAll(Context context, boolean archived, int limit, int offset) throws SQLException { - Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id"); + Query query = createQuery(context, "SELECT i.id FROM Item i WHERE inArchive=:in_archive ORDER BY id"); query.setParameter("in_archive", archived); query.setFirstResult(offset); query.setMaxResults(limit); - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findAll(Context context, boolean archived, boolean withdrawn) throws SQLException { Query query = createQuery(context, - "FROM Item WHERE inArchive=:in_archive or withdrawn=:withdrawn ORDER BY id"); + "SELECT i.id FROM Item i WHERE inArchive=:in_archive or withdrawn=:withdrawn ORDER BY id"); query.setParameter("in_archive", archived); query.setParameter("withdrawn", withdrawn); - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -86,12 +93,14 @@ public Iterator findAllRegularItems(Context context) throws SQLException { // It does not include workspace, workflow or template items. 
Query query = createQuery( context, - "SELECT i FROM Item as i " + + "SELECT i.id FROM Item as i " + "LEFT JOIN Version as v ON i = v.item " + "WHERE i.inArchive=true or i.withdrawn=true or (i.inArchive=false and v.id IS NOT NULL) " + "ORDER BY i.id" ); - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -99,7 +108,7 @@ public Iterator findAll(Context context, boolean archived, boolean withdrawn, boolean discoverable, Date lastModified) throws SQLException { StringBuilder queryStr = new StringBuilder(); - queryStr.append("SELECT i FROM Item i"); + queryStr.append("SELECT i.id FROM Item i"); queryStr.append(" WHERE (inArchive = :in_archive OR withdrawn = :withdrawn)"); queryStr.append(" AND discoverable = :discoverable"); @@ -115,16 +124,20 @@ public Iterator findAll(Context context, boolean archived, if (lastModified != null) { query.setParameter("last_modified", lastModified, TemporalType.TIMESTAMP); } - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findBySubmitter(Context context, EPerson eperson) throws SQLException { Query query = createQuery(context, - "FROM Item WHERE inArchive=:in_archive and submitter=:submitter ORDER BY id"); + "SELECT i.id FROM Item i WHERE inArchive=:in_archive and submitter=:submitter ORDER BY id"); query.setParameter("in_archive", true); query.setParameter("submitter", eperson); - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -133,16 +146,18 @@ public Iterator findBySubmitter(Context context, EPerson eperson, boolean if (!retrieveAllItems) { return findBySubmitter(context, eperson); } - Query query = createQuery(context, "FROM Item WHERE submitter=:submitter ORDER BY id"); + Query query = createQuery(context, "SELECT i.id FROM Item i WHERE submitter=:submitter ORDER BY id"); query.setParameter("submitter", eperson); - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findBySubmitter(Context context, EPerson eperson, MetadataField metadataField, int limit) throws SQLException { StringBuilder query = new StringBuilder(); - query.append("SELECT item FROM Item as item "); + query.append("SELECT item.id FROM Item as item "); addMetadataLeftJoin(query, Item.class.getSimpleName().toLowerCase(), Collections.singletonList(metadataField)); query.append(" WHERE item.inArchive = :in_archive"); query.append(" AND item.submitter =:submitter"); @@ -154,13 +169,15 @@ public Iterator findBySubmitter(Context context, EPerson eperson, Metadata hibernateQuery.setParameter("in_archive", true); hibernateQuery.setParameter("submitter", eperson); hibernateQuery.setMaxResults(limit); - return iterate(context, hibernateQuery, Item.class); + @SuppressWarnings("unchecked") + List uuids = hibernateQuery.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findByMetadataField(Context context, MetadataField metadataField, String value, boolean inArchive) throws SQLException { - String hqlQueryString = "SELECT item FROM Item as item join item.metadata 
metadatavalue " + + String hqlQueryString = "SELECT item.id FROM Item as item join item.metadata metadatavalue " + "WHERE item.inArchive=:in_archive AND metadatavalue.metadataField = :metadata_field"; if (value != null) { hqlQueryString += " AND STR(metadatavalue.value) = :text_value"; @@ -172,13 +189,15 @@ public Iterator findByMetadataField(Context context, MetadataField metadat if (value != null) { query.setParameter("text_value", value); } - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findByMetadataField(Context context, MetadataField metadataField, String value) throws SQLException { - String hqlQueryString = "SELECT item FROM Item as item join item.metadata metadatavalue " + + String hqlQueryString = "SELECT item.id FROM Item as item join item.metadata metadatavalue " + "WHERE metadatavalue.metadataField = :metadata_field"; if (value != null) { hqlQueryString += " AND STR(metadatavalue.value) = :text_value"; @@ -189,7 +208,9 @@ public Iterator findByMetadataField(Context context, MetadataField metadat if (value != null) { query.setParameter("text_value", value); } - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } enum OP { @@ -310,20 +331,22 @@ public Iterator findByMetadataQuery(Context context, List findByAuthorityValue(Context context, MetadataField metadataField, String authority, boolean inArchive) throws SQLException { Query query = createQuery(context, - "SELECT item FROM Item as item join item.metadata metadatavalue " + + "SELECT item.id FROM Item as item join item.metadata metadatavalue " + "WHERE item.inArchive=:in_archive AND metadatavalue.metadataField = :metadata_field AND " + "metadatavalue.authority = :authority ORDER BY item.id"); query.setParameter("in_archive", inArchive); query.setParameter("metadata_field", metadataField); query.setParameter("authority", authority); - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findArchivedByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException { Query query = createQuery(context, - "select i from Item i join i.collections c " + + "select i.id from Item i join i.collections c " + "WHERE :collection IN c AND i.inArchive=:in_archive ORDER BY i.id"); query.setParameter("collection", collection); query.setParameter("in_archive", true); @@ -333,7 +356,9 @@ public Iterator findArchivedByCollection(Context context, Collection colle if (limit != null) { query.setMaxResults(limit); } - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -368,17 +393,18 @@ public int countArchivedByCollectionExcludingOwning(Context context, Collection @Override public Iterator findAllByCollection(Context context, Collection collection) throws SQLException { Query query = createQuery(context, - "select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); + "select i.id from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); query.setParameter("collection", collection); - - return 
iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findAllByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException { Query query = createQuery(context, - "select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); + "select i.id from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); query.setParameter("collection", collection); if (offset != null) { @@ -387,8 +413,9 @@ public Iterator findAllByCollection(Context context, Collection collection if (limit != null) { query.setMaxResults(limit); } - - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override @@ -424,9 +451,12 @@ public int countItems(Context context, List collections, boolean inc public Iterator findByLastModifiedSince(Context context, Date since) throws SQLException { Query query = createQuery(context, - "SELECT i FROM Item i WHERE last_modified > :last_modified ORDER BY id"); + "SELECT i.id FROM Item i WHERE last_modified > :last_modified ORDER BY id"); query.setParameter("last_modified", since, TemporalType.TIMESTAMP); - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); + } @Override @@ -462,22 +492,21 @@ public Iterator findByLikeAuthorityValue(Context context, String likeAuthority, Boolean inArchive) throws SQLException { String allItems = Objects.isNull(inArchive) ? "" : " item.inArchive=:in_archive AND "; Query query = createQuery(context, - "SELECT DISTINCT item FROM Item as item join item.metadata metadatavalue " + "SELECT DISTINCT item.id FROM Item as item join item.metadata metadatavalue " + "WHERE" + allItems + " metadatavalue.authority like :authority ORDER BY item.id"); if (Objects.nonNull(inArchive)) { query.setParameter("in_archive", inArchive); } query.setParameter("authority", likeAuthority); - return iterate(context, query, Item.class); + @SuppressWarnings("unchecked") + List uuids = query.getResultList(); + return new UUIDIterator(context, uuids, Item.class, this); } @Override public Iterator findByIds(Context context, List ids) throws SQLException { - Query query = createQuery(context, - "SELECT item " + "FROM Item as item WHERE item.id IN (:ids)"); - query.setParameter("ids", ids); - return iterate(context, query, Item.class); + return new UUIDIterator(context, ids, Item.class, this); } } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataValueDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataValueDAOImpl.java index 2450db5bd19b..f37ced9ab7d4 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataValueDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataValueDAOImpl.java @@ -58,7 +58,7 @@ public Iterator findItemValuesByFieldAndValue(Context context, Query query = createQuery(context, queryString); query.setParameter("metadata_field_id", metadataField.getID()); query.setParameter("text_value", value); - return iterate(context, query, MetadataValue.class); + return iterate(query); } @Override @@ -69,7 +69,7 @@ public Iterator findByValueLike(Context context, String value) th Query query = createQuery(context, queryString); 
query.setParameter("searchString", value); - return iterate(context, query, MetadataValue.class); + return iterate(query); } @Override diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java index c217eed6ab92..c18f256d9564 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java @@ -26,6 +26,7 @@ import javax.persistence.criteria.Path; import javax.persistence.criteria.Root; +import com.google.common.collect.AbstractIterator; import org.apache.commons.collections.CollectionUtils; import org.hibernate.Session; @@ -341,14 +342,22 @@ public T uniqueResult(Query query) { * @param query * The query for which an Iterator will be made * @return The Iterator for the results of this query - * @throws SQLException */ - public Iterator iterate(Context ctx, Query query, Class entityType) throws SQLException { + public Iterator iterate(Query query) { @SuppressWarnings("unchecked") org.hibernate.query.Query hquery = query.unwrap(org.hibernate.query.Query.class); Stream stream = hquery.stream(); Iterator iter = stream.iterator(); - return new UUIDIterator(ctx, iter, entityType); + return new AbstractIterator () { + @Override + protected T computeNext() { + return iter.hasNext() ? iter.next() : endOfData(); + } + @Override + public void finalize() { + stream.close(); + } + }; } /** diff --git a/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java b/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java index f14ece677426..679f623eb850 100644 --- a/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java +++ b/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java @@ -8,20 +8,15 @@ package org.dspace.core; import java.sql.SQLException; -import java.util.Collection; import java.util.Iterator; -import java.util.LinkedList; import java.util.List; import java.util.UUID; import com.google.common.collect.AbstractIterator; import org.dspace.content.DSpaceObject; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.exception.SQLRuntimeException; import org.springframework.beans.factory.annotation.Autowired; - - /** * Iterator implementation which allows to iterate over items and commit while * iterating. 
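// Illustrative usage sketch (an assumption-laden example, not code from this patch) of the
// pattern the UUID-backed iterators enable: the Context can be committed inside the loop
// without invalidating the iterator, because only UUIDs are cached and each element is
// re-loaded through the DAO on demand. ItemService.findAll(Context) is assumed to delegate
// to the ItemDAOImpl.findAll rework shown above.
import java.sql.SQLException;
import java.util.Iterator;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;

public class CommitWhileIteratingSketch {

    public static void touchAllItems(Context context) throws SQLException {
        ItemService itemService = ContentServiceFactory.getInstance().getItemService();
        Iterator<Item> items = itemService.findAll(context); // UUIDs are collected up front
        while (items.hasNext()) {
            Item item = items.next();       // fetched again by UUID from the current session
            // ... process the item here ...
            context.commit();               // safe: the iterator does not hold managed entities
            context.uncacheEntity(item);    // keep the Hibernate session from growing
        }
    }
}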
Using a list of UUID the iterator doesn't get invalidated after a @@ -30,77 +25,30 @@ * @author stefano.maffei at 4science.com * @param class type */ -public class UUIDIterator extends AbstractIterator { - - private Class entityTypeClass; - - private List cachedUUIDs = new LinkedList<>(); - - private Iterator uuidIterator; +public class UUIDIterator extends AbstractIterator { + private Class clazz; - private Iterator iterator; + private Iterator iterator; - @SuppressWarnings("rawtypes") @Autowired - private DSpaceObjectService dsoService; + private AbstractHibernateDSODAO dao; private Context ctx; - private boolean isSupportedUUIDIterator; - - public UUIDIterator(Context ctx, Iterator i, Class entityTypeClass) throws SQLException { + public UUIDIterator(Context ctx, List uuids, Class clazz, AbstractHibernateDSODAO dao) + throws SQLException { this.ctx = ctx; - - this.entityTypeClass = entityTypeClass; - isSupportedUUIDIterator = DSpaceObject.class.isAssignableFrom(this.entityTypeClass); - - if (isSupportedUUIDIterator) { - while (i.hasNext()) { - DSpaceObject dso = (DSpaceObject) i.next(); - if (dsoService == null) { - dsoService = ContentServiceFactory.getInstance().getDSpaceObjectService(dso); - } - cachedUUIDs.add(dso.getID()); - } - uuidIterator = cachedUUIDs.iterator(); - } else { - iterator = i; - } - - } - - public UUIDIterator(Context ctx, Collection collection, Class entityTypeClass) throws SQLException { - this.ctx = ctx; - - this.entityTypeClass = entityTypeClass; - isSupportedUUIDIterator = DSpaceObject.class.isAssignableFrom(this.entityTypeClass); - - if (isSupportedUUIDIterator) { - for (T obj : collection) { - DSpaceObject dso = (DSpaceObject) obj; - if (dsoService == null) { - dsoService = ContentServiceFactory.getInstance().getDSpaceObjectService(dso); - } - cachedUUIDs.add(dso.getID()); - } - uuidIterator = cachedUUIDs.iterator(); - } else { - throw new UnsupportedOperationException("Cannot generate iterator for this collection"); - } - + this.clazz = clazz; + this.dao = dao; + this.iterator = uuids.iterator(); } - @SuppressWarnings("unchecked") @Override protected T computeNext() { try { - if (isSupportedUUIDIterator) { - return uuidIterator.hasNext() ? (T) dsoService.find(ctx, uuidIterator.next()) : endOfData(); - } else { - return iterator.hasNext() ? (T) iterator.next() : endOfData(); - } + return iterator.hasNext() ? 
dao.findByID(ctx, clazz, iterator.next()) : endOfData(); } catch (SQLException e) { - throw new RuntimeException(e); + throw new SQLRuntimeException(e); } } diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index dc160c336d30..b8b174a12610 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -28,7 +28,6 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.UUIDIterator; import org.dspace.core.factory.CoreServiceFactory; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; @@ -464,10 +463,8 @@ protected boolean doSite(TaskRunner tr, Site site) throws IOException { //Then, perform this task for all Top-Level Communities in the Site // (this will recursively perform task for all objects in DSpace) - Iterator iterator = new UUIDIterator(ctx, communityService.findAllTop(ctx), - Community.class); - while (iterator.hasNext()) { - if (!doCommunity(tr, iterator.next())) { + for (Community subcomm : communityService.findAllTop(ctx)) { + if (!doCommunity(tr, subcomm)) { return false; } } @@ -488,24 +485,16 @@ protected boolean doSite(TaskRunner tr, Site site) throws IOException { * @throws SQLException */ protected boolean doCommunity(TaskRunner tr, Community comm) throws IOException, SQLException { - UUIDIterator subComIter = new UUIDIterator(curationContext(), comm.getSubcommunities(), - Community.class); - UUIDIterator collectionsIter = new UUIDIterator(curationContext(), - comm.getCollections(), - Collection.class); - if (!tr.run(comm)) { return false; } - - while (subComIter.hasNext()) { - if (!doCommunity(tr, subComIter.next())) { + for (Community subcomm : comm.getSubcommunities()) { + if (!doCommunity(tr, subcomm)) { return false; } } - - while (collectionsIter.hasNext()) { - if (!doCollection(tr, collectionsIter.next())) { + for (Collection coll : comm.getCollections()) { + if (!doCollection(tr, coll)) { return false; } } From ced78d38f670d7857af36b807e2f4e67c3825a93 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Fri, 12 Jan 2024 19:09:12 +0100 Subject: [PATCH 649/686] DS-1455 fix lazy initialization exception after commit iterating over communities/collections --- dspace-api/src/main/java/org/dspace/curate/Curator.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index b8b174a12610..737f2e2277e5 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -498,6 +498,8 @@ protected boolean doCommunity(TaskRunner tr, Community comm) throws IOException, return false; } } + Context context = curationContext(); + context.uncacheEntity(comm); return true; } @@ -524,6 +526,7 @@ protected boolean doCollection(TaskRunner tr, Collection coll) throws IOExceptio return false; } } + context.uncacheEntity(coll); } catch (SQLException sqlE) { throw new IOException(sqlE.getMessage(), sqlE); } @@ -542,6 +545,7 @@ protected void visit(DSpaceObject dso) throws IOException, SQLException { Context curCtx = curationContext(); if (curCtx != null && txScope.equals(TxScope.OBJECT)) { curCtx.commit(); + curCtx.reloadEntity(dso); } } From 8c1fa8a019fb974fa40471da094b88d4e8f998e9 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: 
Fri, 12 Jan 2024 19:51:19 +0100 Subject: [PATCH 650/686] DSC-1455 improve handling of not existings ID and test that findByIds consider distinct uuid --- .../main/java/org/dspace/content/ItemServiceImpl.java | 2 +- .../src/main/java/org/dspace/core/UUIDIterator.java | 11 ++++++++++- .../org/dspace/app/rest/ItemRestRepositoryIT.java | 2 ++ 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index c61d6e8c8c9f..6b3ef003edca 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -1844,7 +1844,7 @@ public boolean isItemListedForUser(Context context, Item item) { @Override public Iterator findByIds(Context context, List ids) throws SQLException { return itemDAO.findByIds(context, - ids.stream().map(uuid -> UUID.fromString(uuid)).collect(Collectors.toList())); + ids.stream().map(uuid -> UUID.fromString(uuid)).distinct().collect(Collectors.toList())); } @Override diff --git a/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java b/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java index 679f623eb850..7cd2616ff6e7 100644 --- a/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java +++ b/dspace-api/src/main/java/org/dspace/core/UUIDIterator.java @@ -46,7 +46,16 @@ public UUIDIterator(Context ctx, List uuids, Class clazz, AbstractHiber @Override protected T computeNext() { try { - return iterator.hasNext() ? dao.findByID(ctx, clazz, iterator.next()) : endOfData(); + if (iterator.hasNext()) { + T item = dao.findByID(ctx, clazz, iterator.next()); + if (item != null) { + return item; + } else { + return computeNext(); + } + } else { + return endOfData(); + } } catch (SQLException e) { throw new SQLRuntimeException(e); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index 7e0edf08bac9..681967931f0e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -415,6 +415,8 @@ public void findAllByIdTest() throws Exception { getClient(token).perform(get("/api/core/items/search/findAllById") .param("id", publicItem1.getID().toString(), + publicItem1.getID().toString(), + UUID.randomUUID().toString(), publicItem2.getID().toString(), UUID.randomUUID().toString() )) From dd699c233948706684916398117ab1d3405d802c Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Mon, 15 Jan 2024 10:47:32 +0100 Subject: [PATCH 651/686] DSC-1455 improve description of expected result --- .../test/java/org/dspace/app/rest/ItemRestRepositoryIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index 681967931f0e..f3877445895a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -411,7 +411,8 @@ public void findAllByIdTest() throws Exception { context.restoreAuthSystemState(); String token = getAuthToken(admin.getEmail(), password); - // We want to test that only and exclusively existing items are returned. 
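// Illustrative sketch (not code from this patch) of the service-level behaviour targeted by
// DSC-1455: duplicate ids are collapsed by the distinct() call above and ids that resolve to
// no item are skipped by UUIDIterator, so every existing item is returned exactly once. The
// List<String> parameter type of ItemService.findByIds is taken from the hunk above.
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.UUID;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;

public class FindByIdsSketch {

    public static int countFound(Context context, ItemService itemService, UUID existingId)
        throws SQLException {
        Iterator<Item> found = itemService.findByIds(context, Arrays.asList(
            existingId.toString(),
            existingId.toString(),            // duplicate of an existing item
            UUID.randomUUID().toString()));   // id that matches no item
        int count = 0;
        while (found.hasNext()) {
            found.next();
            count++;
        }
        return count; // expected to be 1 for the list above
    }
}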
+ // We want to test that only and exclusively existing items are returned + // and each item is returned just one time getClient(token).perform(get("/api/core/items/search/findAllById") .param("id", publicItem1.getID().toString(), From db1d7663b6d5b409d2825ff26612152fbb863305 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Mon, 15 Jan 2024 11:49:47 +0100 Subject: [PATCH 652/686] DSC-1458 add and update javadoc to reflect the requested behavior. Add IT to verify the implementation --- .../dspace/content/enhancer/ItemEnhancer.java | 9 ++ .../enhancer/service/ItemEnhancerService.java | 4 +- .../service/ItemEnhancerServiceIT.java | 106 ++++++++++++++++++ 3 files changed, 117 insertions(+), 2 deletions(-) create mode 100644 dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java index a3784146c5ac..70ebf8c5a15a 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java @@ -48,5 +48,14 @@ public interface ItemEnhancer { */ void enhance(Context context, Item item); + /** + * Check if the item needs to be re-evaluated. An item needs to be re-evaluated if the result of the evaluation is + * different than its current state. We delegate this check to the individual item enhancer as it can be smarter in + * performing this check than the service as it knows its internal logic + * + * @param context the DSpace Context + * @param item the item to check + * @return true, if the item must be updated + */ boolean needUpdate(Context context, Item item); } diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java b/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java index 5b3b419bfa8f..133b6f74e8b7 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java @@ -28,8 +28,8 @@ public interface ItemEnhancerService { void enhance(Context context, Item item); /** - * Remove all the already calculated virtual metadata fields from the given item - * and perform a new enhancement. + * Force the computation of the enhanced values and, only if the result is different than the current state of the + * item update it. 
* * @param context the DSpace Context * @param item the item to enhance diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java b/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java new file mode 100644 index 000000000000..76ef29bdb3bd --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java @@ -0,0 +1,106 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.service; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; + +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.enhancer.service.ItemEnhancerService; +import org.dspace.content.enhancer.service.impl.ItemEnhancerServiceImpl; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.utils.DSpace; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class ItemEnhancerServiceIT extends AbstractIntegrationTestWithDatabase { + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemEnhancerServiceIT.class); + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private ItemService spyItemService = spy(itemService); + private ItemEnhancerServiceImpl itemEnhancerService = (ItemEnhancerServiceImpl) new DSpace() + .getSingletonService(ItemEnhancerService.class); + + Community community; + Collection collPub; + Collection collPerson; + Item person; + Item publication; + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. 
+ */ + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + + community = CommunityBuilder.createCommunity(context) + .build(); + collPerson = CollectionBuilder.createCollection(context, community) + .withEntityType("Person") + .build(); + collPub = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .build(); + person = ItemBuilder.createItem(context, collPerson) + .withTitle("Famous Researcher") + .withAffiliation("Some department", null) + .build(); + + publication = ItemBuilder.createItem(context, collPub) + .withTitle("Item to enhance") + .withAuthor(person.getName(), person.getID().toString()) + .build(); + assertMetadataValue(itemService.getMetadataByMetadataString(publication, "cris.virtual.department").get(0), + "cris", "virtual", "department", "Some department", null, 0); + context.restoreAuthSystemState(); + itemEnhancerService.setItemService(spyItemService); + } + + @After + public void after() { + itemEnhancerService.setItemService(itemService); + } + + @Test + public void noUpdateRequiredTest() throws Exception { + context.turnOffAuthorisationSystem(); + itemEnhancerService.enhance(context, publication); + verify(spyItemService, never()).update(any(), any()); + itemEnhancerService.forceEnhancement(context, publication); + verify(spyItemService, never()).update(any(), any()); + context.restoreAuthSystemState(); + } + + + private void assertMetadataValue(MetadataValue metadataValue, String schema, String element, String qualifier, + String value, String authority, int place) { + assertThat(metadataValue.getValue(), equalTo(value)); + assertThat(metadataValue.getMetadataField().getMetadataSchema().getName(), equalTo(schema)); + assertThat(metadataValue.getMetadataField().getElement(), equalTo(element)); + assertThat(metadataValue.getMetadataField().getQualifier(), equalTo(qualifier)); + assertThat(metadataValue.getAuthority(), equalTo(authority)); + assertThat(metadataValue.getPlace(), equalTo(place)); + } +} From de625fc8156e38af5ab7f65798ba15d6481dea3d Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Mon, 15 Jan 2024 13:47:01 +0100 Subject: [PATCH 653/686] DSC-1458 update implementation to support deep mode concept --- .../dspace/content/enhancer/ItemEnhancer.java | 15 +- .../consumer/ItemEnhancerConsumer.java | 2 +- .../impl/RelatedEntityItemEnhancer.java | 195 +++++++++++------- .../enhancer/script/ItemEnhancerScript.java | 8 +- .../ItemEnhancerScriptConfiguration.java | 4 +- .../enhancer/service/ItemEnhancerService.java | 12 +- .../service/impl/ItemEnhancerServiceImpl.java | 47 +---- .../service/ItemEnhancerServiceIT.java | 4 +- 8 files changed, 142 insertions(+), 145 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java index 70ebf8c5a15a..ee6bdbf1dff6 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/ItemEnhancer.java @@ -45,17 +45,10 @@ public interface ItemEnhancer { * * @param context the DSpace Context * @param item the item to enhance + * @param deepMode false, if the implementation can assume that only the target + * item as been updated since the eventual previous computation of enhanced metadata + * @return true, if any changes have been performed on the provided item */ - void enhance(Context context, Item item); + boolean enhance(Context context, Item 
item, boolean deepMode); - /** - * Check if the item needs to be re-evaluated. An item needs to be re-evaluated if the result of the evaluation is - * different than its current state. We delegate this check to the individual item enhancer as it can be smarter in - * performing this check than the service as it knows its internal logic - * - * @param context the DSpace Context - * @param item the item to check - * @return true, if the item must be updated - */ - boolean needUpdate(Context context, Item item); } diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java index 5a2ae2975ef8..c526537bf5ac 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumer.java @@ -61,7 +61,7 @@ public void consume(Context context, Event event) throws Exception { context.turnOffAuthorisationSystem(); try { - itemEnhancerService.enhance(context, item); + itemEnhancerService.enhance(context, item, false); } finally { context.restoreAuthSystemState(); } diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java index 1eb85afe817c..db0fc6573f06 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java @@ -18,9 +18,11 @@ import java.util.function.Consumer; import java.util.function.Predicate; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; import org.dspace.content.Item; import org.dspace.content.MetadataValue; +import org.dspace.content.authority.Choices; +import org.dspace.content.dto.MetadataValueDTO; import org.dspace.content.enhancer.AbstractItemEnhancer; import org.dspace.content.enhancer.ItemEnhancer; import org.dspace.content.service.ItemService; @@ -57,88 +59,148 @@ public boolean canEnhance(Context context, Item item) { } @Override - public void enhance(Context context, Item item) { - try { - boolean isMetadataDeleted = cleanObsoleteVirtualFields(context, item); - if (isMetadataDeleted) { - updateVirtualFieldsPlaces(context, item); + public boolean enhance(Context context, Item item, boolean deepMode) { + boolean result = false; + if (!deepMode) { + try { + result = cleanObsoleteVirtualFields(context, item); + result = result || updateVirtualFieldsPlaces(context, item); + result = result || performEnhancement(context, item); + } catch (SQLException e) { + LOGGER.error("An error occurs enhancing item with id {}: {}", item.getID(), e.getMessage(), e); + throw new SQLRuntimeException(e); } - if (needUpdate(context, item)) { - performEnhancement(context, item); + } else { + List currMetadataValues = getCurrentVirtualMetadata(context, item); + List toBeMetadataValues = getToBeVirtualMetadata(context, item); + if (!equivalent(currMetadataValues, toBeMetadataValues)) { + try { + itemService.removeMetadataValues(context, item, currMetadataValues); + addMetadata(context, item, toBeMetadataValues); + } catch (SQLException e) { + throw new SQLRuntimeException(e); + } + result = true; } - } catch (SQLException e) { - LOGGER.error("An error occurs enhancing item with id {}: {}", item.getID(), e.getMessage(), e); - throw new 
SQLRuntimeException(e); } + return result; } - @Override - public boolean needUpdate(Context context, Item item) { - List metadataValuesToDelete = getObsoleteVirtualFields(item); - boolean isNeedUpdateMetadata = false; + private void addMetadata(Context context, Item item, List toBeMetadataValues) + throws SQLException { + for (MetadataValueDTO dto : toBeMetadataValues) { + itemService.addMetadata(context, item, dto.getSchema(), dto.getElement(), dto.getQualifier(), null, + dto.getValue(), dto.getAuthority(), dto.getConfidence()); + } + } - if (!noEnhanceableMetadata(context, item)) { - for (MetadataValue metadataValue : getEnhanceableMetadataValue(item)) { - if (wasValueAlreadyUsedForEnhancement(item, metadataValue)) { - continue; + private boolean equivalent(List currMetadataValues, List toBeMetadataValues) { + if (currMetadataValues.size() != toBeMetadataValues.size()) { + return false; + } else { + for (int idx = 0; idx < currMetadataValues.size(); idx++) { + if (!equivalent(currMetadataValues.get(idx), toBeMetadataValues.get(idx))) { + return false; } + } + } + return true; + } - Item relatedItem = findRelatedEntityItem(context, metadataValue); - if (relatedItem == null) { - isNeedUpdateMetadata = true; - break; - } + private boolean equivalent(MetadataValue metadataValue, MetadataValueDTO metadataValueDTO) { + return StringUtils.equals(metadataValue.getMetadataField().getMetadataSchema().getName(), + metadataValueDTO.getSchema()) + && StringUtils.equals(metadataValue.getMetadataField().getElement(), metadataValueDTO.getElement()) + && StringUtils.equals(metadataValue.getMetadataField().getQualifier(), metadataValueDTO.getQualifier()) + && StringUtils.equals(metadataValue.getValue(), metadataValueDTO.getValue()) + && StringUtils.equals(metadataValue.getAuthority(), metadataValueDTO.getAuthority()); + } - List relatedItemMetadataValues = - getMetadataValues(relatedItem, relatedItemMetadataField); - if (relatedItemMetadataValues.isEmpty()) { - isNeedUpdateMetadata = true; - break; - } - for (MetadataValue relatedItemMetadataValue : relatedItemMetadataValues) { - if (!isContainingMetadata(item, relatedItemMetadataValue.getValue())) { - isNeedUpdateMetadata = true; - break; - } - } + private List getToBeVirtualMetadata(Context context, Item item) { + List tobeVirtualMetadata = new ArrayList<>(); + List virtualSourceFields = getEnhanceableMetadataValue(item); + for (MetadataValue virtualSourceField : virtualSourceFields) { + MetadataValueDTO mv = new MetadataValueDTO(); + mv.setSchema(VIRTUAL_METADATA_SCHEMA); + mv.setElement(VIRTUAL_SOURCE_METADATA_ELEMENT); + mv.setQualifier(getVirtualQualifier()); + String authority = virtualSourceField.getAuthority(); + Item relatedItem = null; + if (StringUtils.isNotBlank(authority)) { + mv.setValue(authority); + relatedItem = findRelatedEntityItem(context, virtualSourceField); + } else { + mv.setValue(PLACEHOLDER_PARENT_METADATA_VALUE); + relatedItem = findRelatedEntityItem(context, virtualSourceField); + } + tobeVirtualMetadata.add(mv); + if (relatedItem == null) { + MetadataValueDTO mvRelated = new MetadataValueDTO(); + mvRelated.setSchema(VIRTUAL_METADATA_SCHEMA); + mvRelated.setElement(VIRTUAL_METADATA_ELEMENT); + mvRelated.setQualifier(getVirtualQualifier()); + mvRelated.setValue(PLACEHOLDER_PARENT_METADATA_VALUE); + tobeVirtualMetadata.add(mvRelated); + continue; + } + List relatedItemMetadataValues = getMetadataValues(relatedItem, relatedItemMetadataField); + if (relatedItemMetadataValues.isEmpty()) { + MetadataValueDTO mvRelated = new 
MetadataValueDTO(); + mvRelated.setSchema(VIRTUAL_METADATA_SCHEMA); + mvRelated.setElement(VIRTUAL_METADATA_ELEMENT); + mvRelated.setQualifier(getVirtualQualifier()); + mvRelated.setValue(PLACEHOLDER_PARENT_METADATA_VALUE); + tobeVirtualMetadata.add(mvRelated); + continue; + } + for (MetadataValue relatedItemMetadataValue : relatedItemMetadataValues) { + MetadataValueDTO mvRelated = new MetadataValueDTO(); + mvRelated.setSchema(VIRTUAL_METADATA_SCHEMA); + mvRelated.setElement(VIRTUAL_METADATA_ELEMENT); + mvRelated.setQualifier(getVirtualQualifier()); + mvRelated.setValue(relatedItemMetadataValue.getValue()); + String authorityRelated = relatedItemMetadataValue.getAuthority(); + if (StringUtils.isNotBlank(authorityRelated)) { + mvRelated.setAuthority(authorityRelated); + mvRelated.setConfidence(Choices.CF_ACCEPTED); + } + tobeVirtualMetadata.add(mvRelated); } } - - return !metadataValuesToDelete.isEmpty() || isNeedUpdateMetadata; + return tobeVirtualMetadata; } - private boolean isContainingMetadata(Item item, String value) { - return itemService.getMetadata(item, VIRTUAL_METADATA_SCHEMA, VIRTUAL_METADATA_ELEMENT, - getVirtualQualifier(), null, true).stream() - .anyMatch(metadataValue -> metadataValue.getValue().equals(value)); + private List getCurrentVirtualMetadata(Context context, Item item) { + List currentVirtualMetadata = new ArrayList<>(); + List virtualSourceFields = getVirtualSourceFields(item); + for (MetadataValue virtualSourceField : virtualSourceFields) { + currentVirtualMetadata.add(virtualSourceField); + getRelatedVirtualField(item, virtualSourceField).ifPresent(currentVirtualMetadata::add); + } + return currentVirtualMetadata; } - - /** - * Clean obsolete virtual fields. - * - * @param context the DSpace Context - * @param item the item to check - * @return true if some metadata is deleted, false if no metadata was deleted - */ private boolean cleanObsoleteVirtualFields(Context context, Item item) throws SQLException { - + boolean result = false; List metadataValuesToDelete = getObsoleteVirtualFields(item); if (!metadataValuesToDelete.isEmpty()) { itemService.removeMetadataValues(context, item, metadataValuesToDelete); - return true; - } else { - return false; + result = true; } + return result; } - private void updateVirtualFieldsPlaces(Context context, Item item) { + private boolean updateVirtualFieldsPlaces(Context context, Item item) { + boolean result = false; List virtualSourceFields = getVirtualSourceFields(item); for (MetadataValue virtualSourceField : virtualSourceFields) { - metadataWithPlaceToUpdate(item, virtualSourceField) - .ifPresent(updatePlaces(item, virtualSourceField)); + if (metadataWithPlaceToUpdate(item, virtualSourceField).isPresent()) { + updatePlaces(item, virtualSourceField); + result = true; + } } + return result; } private Optional metadataWithPlaceToUpdate(Item item, MetadataValue virtualSourceField) { @@ -192,10 +254,10 @@ private Optional getRelatedVirtualField(Item item, MetadataValue .findFirst(); } - private void performEnhancement(Context context, Item item) throws SQLException { - + private boolean performEnhancement(Context context, Item item) throws SQLException { + boolean result = false; if (noEnhanceableMetadata(context, item)) { - return; + return false; } for (MetadataValue metadataValue : getEnhanceableMetadataValue(item)) { @@ -207,7 +269,7 @@ private void performEnhancement(Context context, Item item) throws SQLException Item relatedItem = findRelatedEntityItem(context, metadataValue); if (relatedItem == null) { 
addVirtualField(context, item, PLACEHOLDER_PARENT_METADATA_VALUE); - addVirtualSourceField(context, item, PLACEHOLDER_PARENT_METADATA_VALUE); + addVirtualSourceField(context, item, metadataValue); continue; } @@ -221,9 +283,9 @@ private void performEnhancement(Context context, Item item) throws SQLException addVirtualField(context, item, relatedItemMetadataValue.getValue()); addVirtualSourceField(context, item, metadataValue); } - + result = true; } - + return result; } private boolean noEnhanceableMetadata(Context context, Item item) { @@ -234,13 +296,8 @@ private boolean noEnhanceableMetadata(Context context, Item item) { } private boolean validAuthority(Context context, MetadataValue metadataValue) { - - // FIXME: we could find a more efficient way, here we are doing twice the same action - // to understand if the enhanced item has at least an item whose references should be put in virtual fields. Item relatedItem = findRelatedEntityItem(context, metadataValue); - return Objects.nonNull(relatedItem) && - CollectionUtils.isNotEmpty( - getMetadataValues(relatedItem, relatedItemMetadataField)); + return Objects.nonNull(relatedItem); } private List getEnhanceableMetadataValue(Item item) { diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScript.java b/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScript.java index 2c4d1f203468..3100920dc17c 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScript.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScript.java @@ -82,14 +82,8 @@ private Iterator findItemsToEnhance() { private void enhanceItem(Item item) { - if (force) { - itemEnhancerService.forceEnhancement(context, item); - } else { - itemEnhancerService.enhance(context, item); - } - + itemEnhancerService.enhance(context, item, force); uncacheItem(item); - } private void uncacheItem(Item item) { diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScriptConfiguration.java index 3822695969f2..17377f67a3dd 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/script/ItemEnhancerScriptConfiguration.java @@ -41,7 +41,9 @@ public Options getOptions() { if (options == null) { Options options = new Options(); - options.addOption("f", "force", false, "force the recalculation of all the virtual fields"); + options.addOption("f", "force", false, + "force the usage of the deep mode" + + " (always compute the enhanced metadata to verify if the item need an update)"); options.getOption("f").setType(boolean.class); options.getOption("f").setRequired(false); diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java b/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java index 133b6f74e8b7..08170448e681 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/service/ItemEnhancerService.java @@ -24,15 +24,9 @@ public interface ItemEnhancerService { * * @param context the DSpace Context * @param item the item to enhance + * @param deepMode false, if the implementation can assume that only the target + * item as been updated since the eventual previous computation of 
enhanced metadata */ - void enhance(Context context, Item item); + void enhance(Context context, Item item, boolean deepMode); - /** - * Force the computation of the enhanced values and, only if the result is different than the current state of the - * item update it. - * - * @param context the DSpace Context - * @param item the item to enhance - */ - void forceEnhancement(Context context, Item item); } diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java index bc040eb1f7b9..578f04305b88 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java @@ -7,23 +7,15 @@ */ package org.dspace.content.enhancer.service.impl; -import static org.dspace.content.Item.ANY; -import static org.dspace.content.enhancer.ItemEnhancer.VIRTUAL_METADATA_ELEMENT; -import static org.dspace.content.enhancer.ItemEnhancer.VIRTUAL_METADATA_SCHEMA; -import static org.dspace.content.enhancer.ItemEnhancer.VIRTUAL_SOURCE_METADATA_ELEMENT; - import java.sql.SQLException; import java.util.List; -import org.apache.commons.collections4.ListUtils; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; -import org.dspace.content.MetadataValue; import org.dspace.content.enhancer.ItemEnhancer; import org.dspace.content.enhancer.service.ItemEnhancerService; import org.dspace.content.service.ItemService; import org.dspace.core.Context; -import org.dspace.core.exception.SQLRuntimeException; import org.springframework.beans.factory.annotation.Autowired; /** @@ -41,15 +33,12 @@ public class ItemEnhancerServiceImpl implements ItemEnhancerService { private ItemService itemService; @Override - public void enhance(Context context, Item item) { + public void enhance(Context context, Item item, boolean deepMode) { boolean isUpdateNeeded = false; for (ItemEnhancer itemEnhancer : itemEnhancers) { if (itemEnhancer.canEnhance(context, item)) { - if (itemEnhancer.needUpdate(context,item)) { - itemEnhancer.enhance(context, item); - isUpdateNeeded = true; - } + isUpdateNeeded = isUpdateNeeded || itemEnhancer.enhance(context, item, deepMode); } } @@ -58,30 +47,6 @@ public void enhance(Context context, Item item) { } } - @Override - public void forceEnhancement(Context context, Item item) { - cleanUpVirtualFields(context, item); - enhance(context, item); - } - - private void cleanUpVirtualFields(Context context, Item item) { - - List virtualFields = getVirtualFields(item); - List virtualSourceFields = getVirtualSourceFields(item); - List metadataValuesToRemove = ListUtils.union(virtualFields, virtualSourceFields); - - if (metadataValuesToRemove.isEmpty()) { - return; - } - - try { - itemService.removeMetadataValues(context, item, metadataValuesToRemove); - } catch (SQLException e) { - throw new SQLRuntimeException(e); - } - - } - private void updateItem(Context context, Item item) { try { itemService.update(context, item); @@ -90,14 +55,6 @@ private void updateItem(Context context, Item item) { } } - private List getVirtualFields(Item item) { - return itemService.getMetadata(item, VIRTUAL_METADATA_SCHEMA, VIRTUAL_METADATA_ELEMENT, ANY, ANY); - } - - private List getVirtualSourceFields(Item item) { - return itemService.getMetadata(item, VIRTUAL_METADATA_SCHEMA, VIRTUAL_SOURCE_METADATA_ELEMENT, ANY, ANY); - } - public List 
getItemEnhancers() { return itemEnhancers; } diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java b/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java index 76ef29bdb3bd..d766b9565282 100644 --- a/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemEnhancerServiceIT.java @@ -86,9 +86,9 @@ public void after() { @Test public void noUpdateRequiredTest() throws Exception { context.turnOffAuthorisationSystem(); - itemEnhancerService.enhance(context, publication); + itemEnhancerService.enhance(context, publication, false); verify(spyItemService, never()).update(any(), any()); - itemEnhancerService.forceEnhancement(context, publication); + itemEnhancerService.enhance(context, publication, true); verify(spyItemService, never()).update(any(), any()); context.restoreAuthSystemState(); } From 3640ccf87085b4df565d7103a9d9e0d70777d8df Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Mon, 15 Jan 2024 14:08:33 +0100 Subject: [PATCH 654/686] DSC-1458 store the placeholder for source metadata without authority --- .../enhancer/impl/RelatedEntityItemEnhancer.java | 6 +++++- .../enhancer/consumer/ItemEnhancerConsumerIT.java | 10 ++++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java index db0fc6573f06..23c01d940fba 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java @@ -355,7 +355,11 @@ private void addVirtualField(Context context, Item item, String value) throws SQ } private void addVirtualSourceField(Context context, Item item, MetadataValue sourceValue) throws SQLException { - addVirtualSourceField(context, item, sourceValue.getAuthority()); + if (StringUtils.isNotBlank(sourceValue.getAuthority())) { + addVirtualSourceField(context, item, sourceValue.getAuthority()); + } else { + addVirtualSourceField(context, item, PLACEHOLDER_PARENT_METADATA_VALUE); + } } private void addVirtualSourceField(Context context, Item item, String sourceValueAuthority) throws SQLException { diff --git a/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java b/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java index eee35a81d045..6e152ed7b0d4 100644 --- a/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java @@ -31,6 +31,7 @@ import org.dspace.content.WorkspaceItem; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; +import org.dspace.core.CrisConstants; import org.dspace.core.ReloadableEntity; import org.junit.Before; import org.junit.Test; @@ -80,9 +81,12 @@ public void testSingleMetadataValueEnhancement() throws Exception { publication = commitAndReload(publication); List metadataValues = publication.getMetadata(); - assertThat(metadataValues, hasSize(9)); + assertThat(metadataValues, hasSize(11)); assertThat(metadataValues, hasItem(with("cris.virtual.department", "4Science"))); assertThat(metadataValues, hasItem(with("cris.virtualsource.department", personId))); + 
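// Reading aid for the assertions in these enhancer ITs: the virtual fields are written as
// place-aligned pairs. For a qualifier such as "department" or "author-orcid",
// cris.virtualsource.<qualifier> at place N records the authority of the source metadata
// value (or the placeholder when it has none), while cris.virtual.<qualifier> at the same
// place N carries the value derived from the related entity, or the placeholder when
// nothing could be derived.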
assertThat(metadataValues, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(metadataValues, hasItem(with("cris.virtualsource.author-orcid", personId))); + MetadataValue virtualField = getFirstMetadataValue(publication, "cris.virtual.department"); MetadataValue virtualSourceField = getFirstMetadataValue(publication, "cris.virtualsource.department"); @@ -94,10 +98,12 @@ public void testSingleMetadataValueEnhancement() throws Exception { publication = commitAndReload(publication); metadataValues = publication.getMetadata(); - assertThat(metadataValues, hasSize(10)); + assertThat(metadataValues, hasSize(12)); assertThat(metadataValues, hasItem(with("dc.contributor.author", "Walter White", personId, 600))); assertThat(metadataValues, hasItem(with("cris.virtual.department", "4Science"))); assertThat(metadataValues, hasItem(with("cris.virtualsource.department", personId))); + assertThat(metadataValues, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(metadataValues, hasItem(with("cris.virtualsource.author-orcid", personId))); assertThat(virtualField, equalTo(getFirstMetadataValue(publication, "cris.virtual.department"))); assertThat(virtualSourceField, equalTo(getFirstMetadataValue(publication, "cris.virtualsource.department"))); From 1b1fbacbd901238427ecb5b30aea41777b2ddcf6 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Mon, 15 Jan 2024 22:15:06 +0100 Subject: [PATCH 655/686] DSC-1455 fix lazy initialization iterating over the hierarchy with the object scope --- .../src/main/java/org/dspace/curate/Curator.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index 737f2e2277e5..670c9bcbe43f 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -464,6 +464,8 @@ protected boolean doSite(TaskRunner tr, Site site) throws IOException { //Then, perform this task for all Top-Level Communities in the Site // (this will recursively perform task for all objects in DSpace) for (Community subcomm : communityService.findAllTop(ctx)) { + // force a reload of the community in case a commit was performed + subcomm = ctx.reloadEntity(subcomm); if (!doCommunity(tr, subcomm)) { return false; } @@ -488,17 +490,22 @@ protected boolean doCommunity(TaskRunner tr, Community comm) throws IOException, if (!tr.run(comm)) { return false; } + Context context = curationContext(); + // force a reload in case we are committing after each object + comm = context.reloadEntity(comm); for (Community subcomm : comm.getSubcommunities()) { if (!doCommunity(tr, subcomm)) { return false; } } + // force a reload in case we are committing after each object + comm = context.reloadEntity(comm); for (Collection coll : comm.getCollections()) { + context.reloadEntity(coll); if (!doCollection(tr, coll)) { return false; } } - Context context = curationContext(); context.uncacheEntity(comm); return true; } @@ -545,7 +552,6 @@ protected void visit(DSpaceObject dso) throws IOException, SQLException { Context curCtx = curationContext(); if (curCtx != null && txScope.equals(TxScope.OBJECT)) { curCtx.commit(); - curCtx.reloadEntity(dso); } } From bba03237fecee3cfccc1678bc722bcca0969cd51 Mon Sep 17 00:00:00 2001 From: "aliaksei.bykau" Date: Mon, 20 Nov 2023 08:59:19 +0100 Subject: [PATCH 656/686] [DSC-1355] removed config for 
dc.relation.ispartof from authority.cfg --- dspace/config/modules/authority.cfg | 4 ---- 1 file changed, 4 deletions(-) diff --git a/dspace/config/modules/authority.cfg b/dspace/config/modules/authority.cfg index 037cf01e5d79..e1814c1669c5 100644 --- a/dspace/config/modules/authority.cfg +++ b/dspace/config/modules/authority.cfg @@ -271,10 +271,6 @@ choices.plugin.dc.identifier.issn = ZDBAuthority choices.presentation.dc.identifier.issn = suggest authority.controlled.dc.identifier.issn = true -choices.plugin.dc.relation.ispartof = SherpaAuthority -choices.presentation.dc.relation.ispartof = suggest -authority.controlled.dc.relation.ispartof = true - authority.controlled.dc.type = true choices.plugin.dc.type = ControlledVocabularyAuthority From e4f3cf4e8c5df10fb38010522ffaa43684c47e38 Mon Sep 17 00:00:00 2001 From: "aliaksei.bykau" Date: Fri, 5 Jan 2024 09:17:05 +0100 Subject: [PATCH 657/686] [DSC-1457] Updated logic of MetadataImporter, RegistryUpdater and RegistryLoader. Now you can load all registries with one script. --- .../dspace/administer/MetadataImporter.java | 37 +++++++++++++++++-- .../org/dspace/administer/RegistryLoader.java | 10 ++++- .../dspace/storage/rdbms/RegistryUpdater.java | 5 ++- dspace/config/dspace.cfg | 2 + 4 files changed, 47 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java index 2677cb20501f..501d86af45f8 100644 --- a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java +++ b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java @@ -7,8 +7,13 @@ */ package org.dspace.administer; +import java.io.File; import java.io.IOException; import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.xpath.XPath; @@ -100,18 +105,39 @@ public static void main(String[] args) Options options = new Options(); options.addOption("f", "file", true, "source xml file for DC fields"); options.addOption("u", "update", false, "update an existing schema"); + options.addOption("h", "help", false, "help message"); CommandLine line = parser.parse(options, args); - if (line.hasOption('f')) { + if (line.hasOption('h')) { + usage(); + System.exit(1); + } else if (line.hasOption('f')) { String file = line.getOptionValue('f'); boolean forceUpdate = line.hasOption('u'); loadRegistry(file, forceUpdate); } else { - usage(); - System.exit(1); + boolean forceUpdate = line.hasOption('u'); + for (String file : getAllRegistryFiles()) { + loadRegistry(file, forceUpdate); + } } } + public static List getAllRegistryFiles() { + File folder = new File("config/registries"); + + if (folder.exists() && folder.isDirectory()) { + File[] files = folder.listFiles((dir, name) -> name.toLowerCase().endsWith(".xml")); + + if (files != null) { + return Arrays.stream(files) + .map(file -> "config/registries/" + file.getName()) + .collect(Collectors.toList()); + } + } + return Collections.emptyList(); + } + /** * Load the data from the specified file path into the database * @@ -285,7 +311,10 @@ private static void loadType(Context context, Node node) public static void usage() { String usage = "Use this class with the following option:\n" + " -f : specify which xml source file " + - "contains the DC fields to import.\n"; + "contains the DC fields to 
import.\n" + + "If you use the script without the -f parameter, then all" + + " registries will be loaded from the config/registries folder\n"; + System.out.println(usage); } } diff --git a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java index bbf320a0d5e5..ba156cafc89e 100644 --- a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java +++ b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java @@ -33,6 +33,8 @@ import org.w3c.dom.NodeList; import org.xml.sax.SAXException; + + /** * Loads the bitstream format and Dublin Core type registries into the database. * Intended for use as a command-line tool. @@ -84,7 +86,13 @@ public static void main(String[] argv) throws Exception { RegistryLoader.loadBitstreamFormats(context, argv[1]); } else if (argv[0].equalsIgnoreCase("-metadata")) { // Call MetadataImporter, as it handles Metadata schema updates - MetadataImporter.loadRegistry(argv[1], true); + if (argv.length == 1) { + for (String file : MetadataImporter.getAllRegistryFiles()) { + MetadataImporter.loadRegistry(file, true); + } + } else { + MetadataImporter.loadRegistry(argv[1], true); + } } else { System.err.println(usage); } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java index 7debf3ba449b..6a1d71b9e656 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java @@ -69,8 +69,9 @@ private void updateRegistries() { + "registries" + File.separator; // Load updates to Bitstream format registry (if any) - log.info("Updating Bitstream Format Registry based on {}bitstream-formats.xml", base); - RegistryLoader.loadBitstreamFormats(context, base + "bitstream-formats.xml"); + String bitstreamFormat = config.getProperty("registry.bitstream-formats.load"); + log.info("Updating Bitstream Format Registry based on {}{}", base, bitstreamFormat); + RegistryLoader.loadBitstreamFormats(context, base + bitstreamFormat); // Load updates to Metadata schema registries (if any) log.info("Updating Metadata Registries based on metadata type configs in {}", base); diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 05ebdd660679..dcba8455fdd4 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1012,6 +1012,8 @@ registry.metadata.load = dspace-types.xml registry.metadata.load = iiif-types.xml registry.metadata.load = bitstream-types.xml +registry.bitstream-formats.load = bitstream-formats.xml + #---------------------------------------------------------------# #-----------------UI-Related CONFIGURATIONS---------------------# #---------------------------------------------------------------# From 37cb89a28212a76412192f8841f6c4144987c35d Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Wed, 17 Jan 2024 22:56:26 +0100 Subject: [PATCH 658/686] DSC-1458 fix test and implementation related to the removal of a metadata --- .../impl/RelatedEntityItemEnhancer.java | 26 +++++++-------- .../service/impl/ItemEnhancerServiceImpl.java | 2 +- .../consumer/ItemEnhancerConsumerIT.java | 33 +++++++++++++++---- 3 files changed, 39 insertions(+), 22 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java index 
23c01d940fba..1eee6daeee11 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java @@ -15,7 +15,6 @@ import java.util.Objects; import java.util.Optional; import java.util.UUID; -import java.util.function.Consumer; import java.util.function.Predicate; import org.apache.commons.lang3.StringUtils; @@ -64,8 +63,8 @@ public boolean enhance(Context context, Item item, boolean deepMode) { if (!deepMode) { try { result = cleanObsoleteVirtualFields(context, item); - result = result || updateVirtualFieldsPlaces(context, item); - result = result || performEnhancement(context, item); + result = updateVirtualFieldsPlaces(context, item) || result; + result = performEnhancement(context, item) || result; } catch (SQLException e) { LOGGER.error("An error occurs enhancing item with id {}: {}", item.getID(), e.getMessage(), e); throw new SQLRuntimeException(e); @@ -131,7 +130,6 @@ private List getToBeVirtualMetadata(Context context, Item item relatedItem = findRelatedEntityItem(context, virtualSourceField); } else { mv.setValue(PLACEHOLDER_PARENT_METADATA_VALUE); - relatedItem = findRelatedEntityItem(context, virtualSourceField); } tobeVirtualMetadata.add(mv); if (relatedItem == null) { @@ -195,8 +193,9 @@ private boolean updateVirtualFieldsPlaces(Context context, Item item) { boolean result = false; List virtualSourceFields = getVirtualSourceFields(item); for (MetadataValue virtualSourceField : virtualSourceFields) { - if (metadataWithPlaceToUpdate(item, virtualSourceField).isPresent()) { - updatePlaces(item, virtualSourceField); + Optional metadataWithPlaceToUpdate = metadataWithPlaceToUpdate(item, virtualSourceField); + if (metadataWithPlaceToUpdate.isPresent()) { + updatePlaces(item, metadataWithPlaceToUpdate.get(), virtualSourceField); result = true; } } @@ -213,12 +212,10 @@ private Predicate hasToUpdatePlace(MetadataValue virtualSourceFie return metadataValue -> metadataValue.getPlace() != virtualSourceField.getPlace(); } - private Consumer updatePlaces(Item item, MetadataValue virtualSourceField) { - return mv -> { - virtualSourceField.setPlace(mv.getPlace()); - getRelatedVirtualField(item, mv) - .ifPresent(relatedMv -> relatedMv.setPlace(mv.getPlace())); - }; + private void updatePlaces(Item item, MetadataValue mv, MetadataValue virtualSourceField) { + virtualSourceField.setPlace(mv.getPlace()); + getRelatedVirtualField(item, mv) + .ifPresent(relatedMv -> relatedMv.setPlace(mv.getPlace())); } private Optional findEnhanceableValue(MetadataValue virtualSourceField, Item item) { @@ -233,7 +230,7 @@ private List getObsoleteVirtualFields(Item item) { List virtualSourceFields = getVirtualSourceFields(item); for (MetadataValue virtualSourceField : virtualSourceFields) { - if (!isPlaceholder(virtualSourceField) && isRelatedSourceNoMorePresent(item, virtualSourceField)) { + if (isRelatedSourceNoMorePresent(item, virtualSourceField)) { obsoleteVirtualFields.add(virtualSourceField); getRelatedVirtualField(item, virtualSourceField).ifPresent(obsoleteVirtualFields::add); } @@ -321,7 +318,8 @@ private boolean isPlaceholderAtPlace(List metadataValues, int pla } private boolean hasAuthorityEqualsTo(MetadataValue metadataValue, String authority) { - return Objects.equals(metadataValue.getAuthority(), authority); + return Objects.equals(metadataValue.getAuthority(), authority) + || Objects.equals(PLACEHOLDER_PARENT_METADATA_VALUE, authority); } private Item 
findRelatedEntityItem(Context context, MetadataValue metadataValue) { diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java index 578f04305b88..e751a431ac37 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/service/impl/ItemEnhancerServiceImpl.java @@ -38,7 +38,7 @@ public void enhance(Context context, Item item, boolean deepMode) { for (ItemEnhancer itemEnhancer : itemEnhancers) { if (itemEnhancer.canEnhance(context, item)) { - isUpdateNeeded = isUpdateNeeded || itemEnhancer.enhance(context, item, deepMode); + isUpdateNeeded = itemEnhancer.enhance(context, item, deepMode) || isUpdateNeeded; } } diff --git a/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java b/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java index 6e152ed7b0d4..176f055a4468 100644 --- a/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java +++ b/dspace-api/src/test/java/org/dspace/content/enhancer/consumer/ItemEnhancerConsumerIT.java @@ -31,7 +31,6 @@ import org.dspace.content.WorkspaceItem; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; -import org.dspace.core.CrisConstants; import org.dspace.core.ReloadableEntity; import org.junit.Before; import org.junit.Test; @@ -142,7 +141,7 @@ public void testManyMetadataValuesEnhancement() throws Exception { publication = commitAndReload(publication); List values = publication.getMetadata(); - assertThat(values, hasSize(18)); + assertThat(values, hasSize(26)); assertThat(values, hasItem(with("dc.contributor.author", "Red Smith"))); assertThat(values, hasItem(with("dc.contributor.author", "Walter White", person1.getID().toString(), 1, 600))); assertThat(values, hasItem(with("dc.contributor.author", "John Smith", person2.getID().toString(), 2, 600))); @@ -155,9 +154,18 @@ public void testManyMetadataValuesEnhancement() throws Exception { assertThat(values, hasItem(with("cris.virtualsource.department", person2.getID().toString(), 2))); assertThat(values, hasItem(with("cris.virtual.department", "University of Rome", 3))); assertThat(values, hasItem(with("cris.virtualsource.department", person3.getID().toString(), 3))); - + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 0))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 0))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 1))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person1.getID().toString(), 1))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 2))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person2.getID().toString(), 2))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 3))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person3.getID().toString(), 3))); assertThat(getMetadataValues(publication, "cris.virtual.department"), hasSize(4)); assertThat(getMetadataValues(publication, "cris.virtualsource.department"), hasSize(4)); + assertThat(getMetadataValues(publication, 
"cris.virtual.author-orcid"), hasSize(4)); + assertThat(getMetadataValues(publication, "cris.virtualsource.author-orcid"), hasSize(4)); } @@ -195,7 +203,7 @@ public void testEnhancementAfterMetadataAddition() throws Exception { publication = commitAndReload(publication); metadataValues = publication.getMetadata(); - assertThat(metadataValues, hasSize(9)); + assertThat(metadataValues, hasSize(11)); assertThat(metadataValues, hasItem(with("dc.contributor.author", "Walter White", personId, 600))); assertThat(metadataValues, hasItem(with("cris.virtual.department", "4Science"))); assertThat(metadataValues, hasItem(with("cris.virtualsource.department", personId))); @@ -234,7 +242,7 @@ public void testEnhancementWithMetadataRemoval() throws Exception { publication = commitAndReload(publication); List values = publication.getMetadata(); - assertThat(values, hasSize(15)); + assertThat(values, hasSize(21)); assertThat(values, hasItem(with("dc.contributor.author", "Walter White", person1.getID().toString(), 0, 600))); assertThat(values, hasItem(with("dc.contributor.author", "John Smith", person2.getID().toString(), 1, 600))); assertThat(values, hasItem(with("dc.contributor.author", "Jesse Pinkman", person3.getID().toString(), 2, 600))); @@ -244,6 +252,12 @@ public void testEnhancementWithMetadataRemoval() throws Exception { assertThat(values, hasItem(with("cris.virtualsource.department", person2.getID().toString(), 1))); assertThat(values, hasItem(with("cris.virtual.department", "University of Rome", 2))); assertThat(values, hasItem(with("cris.virtualsource.department", person3.getID().toString(), 2))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person1.getID().toString()))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person2.getID().toString(), 1))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person3.getID().toString(), 2))); assertThat(getMetadataValues(publication, "cris.virtual.department"), hasSize(3)); assertThat(getMetadataValues(publication, "cris.virtualsource.department"), hasSize(3)); @@ -257,16 +271,21 @@ public void testEnhancementWithMetadataRemoval() throws Exception { publication = commitAndReload(publication); values = publication.getMetadata(); - assertThat(values, hasSize(12)); + assertThat(values, hasSize(16)); assertThat(values, hasItem(with("dc.contributor.author", "Walter White", person1.getID().toString(), 0, 600))); assertThat(values, hasItem(with("dc.contributor.author", "Jesse Pinkman", person3.getID().toString(), 1, 600))); assertThat(values, hasItem(with("cris.virtual.department", "4Science"))); assertThat(values, hasItem(with("cris.virtualsource.department", person1.getID().toString()))); assertThat(values, hasItem(with("cris.virtual.department", "University of Rome", 1))); assertThat(values, hasItem(with("cris.virtualsource.department", person3.getID().toString(), 1))); - + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 0))); + assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person1.getID().toString(), 0))); + assertThat(values, hasItem(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE, 1))); + 
assertThat(values, hasItem(with("cris.virtualsource.author-orcid", person3.getID().toString(), 1))); assertThat(getMetadataValues(publication, "cris.virtual.department"), hasSize(2)); assertThat(getMetadataValues(publication, "cris.virtualsource.department"), hasSize(2)); + assertThat(getMetadataValues(publication, "cris.virtual.author-orcid"), hasSize(2)); + assertThat(getMetadataValues(publication, "cris.virtualsource.author-orcid"), hasSize(2)); } From 0e88cf56b4a154a1b4f6075fa05fef5654b62007 Mon Sep 17 00:00:00 2001 From: Mattia Vianelli Date: Thu, 18 Jan 2024 17:24:52 +0100 Subject: [PATCH 659/686] DSC-1488 Added test for NPE in CrisLayoutBoxServiceImpl --- .../impl/CrisLayoutBoxServiceImplTest.java | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java index fd61aeb0a762..67872fb52330 100644 --- a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java @@ -291,6 +291,25 @@ public void testIiifBoxHasNoContentWithMetadataUndefined() { assertFalse(crisLayoutBoxService.hasContent(context, box, item)); } + @Test + public void testSingleMetadataboxBitstreamWithoutField() { + + CrisLayoutBox singleBitstreamBox = new CrisLayoutBox(); + singleBitstreamBox.setShortname("File"); + singleBitstreamBox.setType(null); + Item item = item(); + + CrisLayoutFieldBitstream fieldBitstream = new CrisLayoutFieldBitstream(); + fieldBitstream.setBundle("ORIGINAL"); + fieldBitstream.setMetadataValue(null); + fieldBitstream.setMetadataField(null); + fieldBitstream.setRendering("attachment"); + + singleBitstreamBox.addLayoutField(fieldBitstream); + + assertThat(crisLayoutBoxService.hasContent(context, singleBitstreamBox, item), is(true)); + } + private CrisLayoutBox crisLayoutMetadataBox(String shortname, MetadataField... 
metadataFields) { return crisLayoutBox(shortname, CrisLayoutBoxTypes.METADATA.name(), metadataFields); } From 6404c730371af4421ede693a277a3479e8d8b457 Mon Sep 17 00:00:00 2001 From: Mattia Vianelli Date: Thu, 18 Jan 2024 18:01:25 +0100 Subject: [PATCH 660/686] DSC-1488 Test fix --- .../layout/service/impl/CrisLayoutBoxServiceImplTest.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java index 67872fb52330..ce539cda67aa 100644 --- a/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImplTest.java @@ -292,12 +292,14 @@ public void testIiifBoxHasNoContentWithMetadataUndefined() { } @Test - public void testSingleMetadataboxBitstreamWithoutField() { + public void testSingleMetadataboxBitstreamWithoutField() throws SQLException { CrisLayoutBox singleBitstreamBox = new CrisLayoutBox(); singleBitstreamBox.setShortname("File"); singleBitstreamBox.setType(null); + Item item = item(); + Bitstream bitstream = mock(Bitstream.class); CrisLayoutFieldBitstream fieldBitstream = new CrisLayoutFieldBitstream(); fieldBitstream.setBundle("ORIGINAL"); @@ -307,7 +309,11 @@ public void testSingleMetadataboxBitstreamWithoutField() { singleBitstreamBox.addLayoutField(fieldBitstream); + when(bitstreamService.findShowableByItem(context, item.getID(), "ORIGINAL", Map.of())) + .thenReturn(List.of(bitstream)); + assertThat(crisLayoutBoxService.hasContent(context, singleBitstreamBox, item), is(true)); + } private CrisLayoutBox crisLayoutMetadataBox(String shortname, MetadataField... 
metadataFields) { From 71c8a1931a6039aa2c059a521c3a2594aaafe24f Mon Sep 17 00:00:00 2001 From: Mattia Vianelli Date: Thu, 18 Jan 2024 18:02:09 +0100 Subject: [PATCH 661/686] DSC-1488 Added fix for NPE in CrisLayoutBoxServiceImpl --- .../layout/service/impl/CrisLayoutBoxServiceImpl.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImpl.java index 269dd6601853..acd5d38c0b41 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutBoxServiceImpl.java @@ -208,7 +208,13 @@ private boolean isMetadataFieldPresent(DSpaceObject item, MetadataField metadata } private boolean isBitstreamPresent(Context context, Item item, CrisLayoutFieldBitstream field) { - Map filters = Map.of(field.getMetadataField().toString('.'), field.getMetadataValue()); + + Map filters = Map.of(); + + if (field.getMetadataField() != null) { + filters = Map.of(field.getMetadataField().toString('.'), field.getMetadataValue()); + } + try { return bitstreamService.findShowableByItem(context, item.getID(), field.getBundle(), filters).size() > 0; } catch (SQLException e) { From 76c42ee8d659f269d0d10d4808663b077ddb0579 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Thu, 18 Jan 2024 18:52:16 +0100 Subject: [PATCH 662/686] DSC-1458 fix misbehaviours in case an author or external authors appear multiple time the metadata --- .../impl/RelatedEntityItemEnhancer.java | 67 ++++++++++++++----- 1 file changed, 50 insertions(+), 17 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java index 1eee6daeee11..f17e36ee90a9 100644 --- a/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java +++ b/dspace-api/src/main/java/org/dspace/content/enhancer/impl/RelatedEntityItemEnhancer.java @@ -15,7 +15,6 @@ import java.util.Objects; import java.util.Optional; import java.util.UUID; -import java.util.function.Predicate; import org.apache.commons.lang3.StringUtils; import org.dspace.content.Item; @@ -192,8 +191,10 @@ private boolean cleanObsoleteVirtualFields(Context context, Item item) throws SQ private boolean updateVirtualFieldsPlaces(Context context, Item item) { boolean result = false; List virtualSourceFields = getVirtualSourceFields(item); + List enhanceableMetadataValue = getEnhanceableMetadataValue(item); for (MetadataValue virtualSourceField : virtualSourceFields) { - Optional metadataWithPlaceToUpdate = metadataWithPlaceToUpdate(item, virtualSourceField); + Optional metadataWithPlaceToUpdate = metadataWithPlaceToUpdate(item, + enhanceableMetadataValue, virtualSourceField); if (metadataWithPlaceToUpdate.isPresent()) { updatePlaces(item, metadataWithPlaceToUpdate.get(), virtualSourceField); result = true; @@ -202,14 +203,14 @@ private boolean updateVirtualFieldsPlaces(Context context, Item item) { return result; } - private Optional metadataWithPlaceToUpdate(Item item, MetadataValue virtualSourceField) { - return findEnhanceableValue(virtualSourceField, item) - .filter(hasToUpdatePlace(virtualSourceField)) - .stream().findFirst(); + private Optional metadataWithPlaceToUpdate(Item item, List enhanceableMetadataValue, + MetadataValue virtualSourceField) { + return 
findMetadataValueToUpdatePlace(enhanceableMetadataValue, virtualSourceField, + item); } - private Predicate hasToUpdatePlace(MetadataValue virtualSourceField) { - return metadataValue -> metadataValue.getPlace() != virtualSourceField.getPlace(); + private boolean hasToUpdatePlace(MetadataValue metadataValue, MetadataValue virtualSourceField) { + return metadataValue.getPlace() != virtualSourceField.getPlace(); } private void updatePlaces(Item item, MetadataValue mv, MetadataValue virtualSourceField) { @@ -218,10 +219,23 @@ private void updatePlaces(Item item, MetadataValue mv, MetadataValue virtualSour .ifPresent(relatedMv -> relatedMv.setPlace(mv.getPlace())); } - private Optional findEnhanceableValue(MetadataValue virtualSourceField, Item item) { - return getEnhanceableMetadataValue(item).stream() - .filter(metadataValue -> hasAuthorityEqualsTo(metadataValue, virtualSourceField.getValue())) - .findFirst(); + private Optional findMetadataValueToUpdatePlace(List enhanceableMetadataValue, + MetadataValue virtualSourceField, Item item) { + Optional exactMatch = enhanceableMetadataValue.stream() + .filter(metadataValue -> hasAuthorityEqualsTo(metadataValue, + virtualSourceField.getValue()) && !hasToUpdatePlace(metadataValue, virtualSourceField)) + .findFirst(); + if (exactMatch.isPresent()) { + enhanceableMetadataValue.remove(exactMatch.get()); + return Optional.empty(); + } else { + Optional authorityOnlyMatch = enhanceableMetadataValue.stream() + .filter(metadataValue -> hasAuthorityEqualsTo(metadataValue, + virtualSourceField.getValue()) && hasToUpdatePlace(metadataValue, virtualSourceField)) + .findFirst(); + enhanceableMetadataValue.remove(authorityOnlyMatch.get()); + return authorityOnlyMatch; + } } private List getObsoleteVirtualFields(Item item) { @@ -229,8 +243,9 @@ private List getObsoleteVirtualFields(Item item) { List obsoleteVirtualFields = new ArrayList<>(); List virtualSourceFields = getVirtualSourceFields(item); + List enhanceableMetadata = getEnhanceableMetadataValue(item); for (MetadataValue virtualSourceField : virtualSourceFields) { - if (isRelatedSourceNoMorePresent(item, virtualSourceField)) { + if (isRelatedSourceNoMorePresent(item, enhanceableMetadata, virtualSourceField)) { obsoleteVirtualFields.add(virtualSourceField); getRelatedVirtualField(item, virtualSourceField).ifPresent(obsoleteVirtualFields::add); } @@ -240,9 +255,26 @@ private List getObsoleteVirtualFields(Item item) { } - private boolean isRelatedSourceNoMorePresent(Item item, MetadataValue virtualSourceField) { - return getEnhanceableMetadataValue(item).stream() - .noneMatch(metadataValue -> hasAuthorityEqualsTo(metadataValue, virtualSourceField.getValue())); + /** + * This method will look in the enhanceableMetadata if the source metadata is still present. 
If so, it will remove + * form the list as it would not be used to validate other potential duplicate source metadata + * + * @param item + * @param enhanceableMetadata + * @param virtualSourceField + * @return true if the metadata containing a source of enhancement is still present in the list of the metadata to + * use to enhance the item + */ + private boolean isRelatedSourceNoMorePresent(Item item, List enhanceableMetadata, + MetadataValue virtualSourceField) { + Optional mv = enhanceableMetadata.stream() + .filter(metadataValue -> hasAuthorityEqualsTo(metadataValue, virtualSourceField.getValue())) + .findFirst(); + if (mv.isPresent()) { + enhanceableMetadata.remove(mv.get()); + return false; + } + return true; } private Optional getRelatedVirtualField(Item item, MetadataValue virtualSourceField) { @@ -319,7 +351,8 @@ private boolean isPlaceholderAtPlace(List metadataValues, int pla private boolean hasAuthorityEqualsTo(MetadataValue metadataValue, String authority) { return Objects.equals(metadataValue.getAuthority(), authority) - || Objects.equals(PLACEHOLDER_PARENT_METADATA_VALUE, authority); + || (StringUtils.isBlank(metadataValue.getAuthority()) + && Objects.equals(PLACEHOLDER_PARENT_METADATA_VALUE, authority)); } private Item findRelatedEntityItem(Context context, MetadataValue metadataValue) { From cdcd95951f6d5edf227aacdaff9a08c269692b67 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Fri, 19 Jan 2024 10:47:44 +0100 Subject: [PATCH 663/686] DSC-1458 fix test to reflect the fact that enhanced metadata are always included also for external authors --- .../app/bulkimport/service/BulkImportWorkbookBuilderIT.java | 4 ++-- .../src/test/java/org/dspace/harvest/OAIHarvesterIT.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java index a7006b6a8a10..9de061db8f94 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java @@ -209,7 +209,7 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { Item firstItem = getItemFromMessage(handler.getInfoMessages().get(7)); assertThat(firstItem, notNullValue()); - assertThat(firstItem.getMetadata(), hasSize(14)); + assertThat(firstItem.getMetadata(), hasSize(16)); assertThat(firstItem.getMetadata(), hasItems( with("dc.title", "Test Publication"), with("dc.date.issued", "2020/02/15"), @@ -227,7 +227,7 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { Item secondItem = getItemFromMessage(handler.getInfoMessages().get(10)); assertThat(secondItem, notNullValue()); - assertThat(secondItem.getMetadata(), hasSize(14)); + assertThat(secondItem.getMetadata(), hasSize(16)); assertThat(secondItem.getMetadata(), hasItems( with("dc.title", "Second Publication"), with("dc.date.issued", "2022/02/15"), diff --git a/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java b/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java index e58788104aa7..c180b77dc26e 100644 --- a/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java +++ b/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java @@ -769,7 +769,7 @@ public void testRunHarvestWithPublicationAndThenPerson() throws Exception { Item publication = publications.get(0); List values = 
publication.getMetadata(); - assertThat(values, hasSize(17)); + assertThat(values, hasSize(19)); assertThat(values, hasItems(with("dc.title", "Test Publication"))); assertThat(values, hasItems(with("dc.type", "Controlled Vocabulary for Resource Type Genres::text"))); @@ -859,7 +859,7 @@ public void testRunHarvestWithPersonAndThenPublication() throws Exception { Item person = findItemByOaiID("oai:test-harvest:Persons/123", personCollection); List values = person.getMetadata(); - assertThat(values, hasSize(12)); + assertThat(values, hasSize(14)); assertThat(values, hasItems(with("dc.title", "Manghi, Paolo"))); assertThat(values, hasItems(with("cris.sourceId", "test-harvest::123"))); assertThat(values, hasItems(with("dspace.entity.type", "Person"))); From ba6cdbf1f78a6f099b30336aa8f3bfe534207011 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Sat, 20 Jan 2024 15:51:06 +0100 Subject: [PATCH 664/686] DURACOM-225 add IT for the media filter script --- .../dspace/app/mediafilter/MediaFilterIT.java | 237 ++++++++++++++++++ .../org/dspace/builder/BitstreamBuilder.java | 14 ++ 2 files changed, 251 insertions(+) create mode 100644 dspace-api/src/test/java/org/dspace/app/mediafilter/MediaFilterIT.java diff --git a/dspace-api/src/test/java/org/dspace/app/mediafilter/MediaFilterIT.java b/dspace-api/src/test/java/org/dspace/app/mediafilter/MediaFilterIT.java new file mode 100644 index 000000000000..aef2476fdc45 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/mediafilter/MediaFilterIT.java @@ -0,0 +1,237 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.junit.Before; +import org.junit.Test; + +/** + * Tests of {@link MediaFilterScript}. 
+ * + * @author Andrea Bollini + */ +public class MediaFilterIT extends AbstractIntegrationTestWithDatabase { + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + protected Community topComm1; + protected Community topComm2; + protected Community childComm1_1; + protected Community childComm1_2; + protected Collection col1_1; + protected Collection col1_2; + protected Collection col1_1_1; + protected Collection col1_1_2; + protected Collection col1_2_1; + protected Collection col1_2_2; + protected Collection col2_1; + protected Item item1_1_a; + protected Item item1_1_b; + protected Item item1_2_a; + protected Item item1_2_b; + protected Item item1_1_1_a; + protected Item item1_1_1_b; + protected Item item1_1_2_a; + protected Item item1_1_2_b; + protected Item item1_2_1_a; + protected Item item1_2_1_b; + protected Item item1_2_2_a; + protected Item item1_2_2_b; + protected Item item2_1_a; + protected Item item2_1_b; + + @Before + public void setup() throws IOException, SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + topComm1 = CommunityBuilder.createCommunity(context).withName("Parent Community1").build(); + topComm2 = CommunityBuilder.createCommunity(context).withName("Parent Community2").build(); + childComm1_1 = CommunityBuilder.createCommunity(context).withName("Child Community1_1") + .addParentCommunity(context, topComm1).build(); + childComm1_2 = CommunityBuilder.createCommunity(context).withName("Child Community1_2") + .addParentCommunity(context, topComm1).build(); + col1_1 = CollectionBuilder.createCollection(context, topComm1).withName("Collection 1_1").build(); + col1_2 = CollectionBuilder.createCollection(context, topComm1).withName("Collection 1_2").build(); + col1_1_1 = CollectionBuilder.createCollection(context, childComm1_1).withName("Collection 1_1_1").build(); + col1_1_2 = CollectionBuilder.createCollection(context, childComm1_1).withName("Collection 1_1_2").build(); + col1_2_1 = CollectionBuilder.createCollection(context, childComm1_2).withName("Collection 1_1_1").build(); + col1_2_2 = CollectionBuilder.createCollection(context, childComm1_2).withName("Collection 1_2").build(); + col2_1 = CollectionBuilder.createCollection(context, topComm2).withName("Collection 2_1").build(); + + // Create two items in each collection, one with the test.csv file and one with the test.txt file + item1_1_a = ItemBuilder.createItem(context, col1_1).withTitle("Item 1_1_a").withIssueDate("2017-10-17").build(); + item1_1_b = ItemBuilder.createItem(context, col1_1).withTitle("Item 1_1_b").withIssueDate("2017-10-17").build(); + item1_1_1_a = ItemBuilder.createItem(context, col1_1_1).withTitle("Item 1_1_1_a").withIssueDate("2017-10-17") + .build(); + item1_1_1_b = ItemBuilder.createItem(context, col1_1_1).withTitle("Item 1_1_1_b").withIssueDate("2017-10-17") + .build(); + item1_1_2_a = ItemBuilder.createItem(context, col1_1_2).withTitle("Item 1_1_2_a").withIssueDate("2017-10-17") + .build(); + item1_1_2_b = ItemBuilder.createItem(context, col1_1_2).withTitle("Item 1_1_2_b").withIssueDate("2017-10-17") + .build(); + item1_2_a = ItemBuilder.createItem(context, col1_2).withTitle("Item 1_2_a").withIssueDate("2017-10-17").build(); + item1_2_b = ItemBuilder.createItem(context, col1_2).withTitle("Item 1_2_b").withIssueDate("2017-10-17").build(); + item1_2_1_a = ItemBuilder.createItem(context, col1_2_1).withTitle("Item 
1_2_1_a").withIssueDate("2017-10-17") + .build(); + item1_2_1_b = ItemBuilder.createItem(context, col1_2_1).withTitle("Item 1_2_1_b").withIssueDate("2017-10-17") + .build(); + item1_2_2_a = ItemBuilder.createItem(context, col1_2_2).withTitle("Item 1_2_2_a").withIssueDate("2017-10-17") + .build(); + item1_2_2_b = ItemBuilder.createItem(context, col1_2_2).withTitle("Item 1_2_2_b").withIssueDate("2017-10-17") + .build(); + item2_1_a = ItemBuilder.createItem(context, col2_1).withTitle("Item 2_1_a").withIssueDate("2017-10-17").build(); + item2_1_b = ItemBuilder.createItem(context, col2_1).withTitle("Item 2_1_b").withIssueDate("2017-10-17").build(); + addBitstream(item1_1_a, "test.csv"); + addBitstream(item1_1_b, "test.txt"); + addBitstream(item1_2_a, "test.csv"); + addBitstream(item1_2_b, "test.txt"); + addBitstream(item1_1_1_a, "test.csv"); + addBitstream(item1_1_1_b, "test.txt"); + addBitstream(item1_1_2_a, "test.csv"); + addBitstream(item1_1_2_b, "test.txt"); + addBitstream(item1_2_1_a, "test.csv"); + addBitstream(item1_2_1_b, "test.txt"); + addBitstream(item1_2_2_a, "test.csv"); + addBitstream(item1_2_2_b, "test.txt"); + addBitstream(item2_1_a, "test.csv"); + addBitstream(item2_1_b, "test.txt"); + context.restoreAuthSystemState(); + } + + private void addBitstream(Item item, String filename) throws SQLException, AuthorizeException, IOException { + BitstreamBuilder.createBitstream(context, item, getClass().getResourceAsStream(filename)).withName(filename) + .guessFormat().build(); + } + + @Test + public void mediaFilterScriptAllItemsTest() throws Exception { + performMediaFilterScript(null); + Iterator items = itemService.findAll(context); + while (items.hasNext()) { + Item item = items.next(); + checkItemHasBeenProcessed(item); + } + } + + @Test + public void mediaFilterScriptIdentifiersTest() throws Exception { + // process the item 1_1_a and verify that no other items has been processed using the "closer" one + performMediaFilterScript(item1_1_a); + checkItemHasBeenProcessed(item1_1_a); + checkItemHasBeenNotProcessed(item1_1_b); + // process the collection 1_1_1 and verify that items in another collection has not been processed + performMediaFilterScript(col1_1_1); + checkItemHasBeenProcessed(item1_1_1_a); + checkItemHasBeenProcessed(item1_1_1_b); + checkItemHasBeenNotProcessed(item1_1_2_a); + checkItemHasBeenNotProcessed(item1_1_2_b); + // process a top community with only collections + performMediaFilterScript(topComm2); + checkItemHasBeenProcessed(item2_1_a); + checkItemHasBeenProcessed(item2_1_b); + // verify that the other items have not been processed yet + checkItemHasBeenNotProcessed(item1_1_b); + checkItemHasBeenNotProcessed(item1_2_a); + checkItemHasBeenNotProcessed(item1_2_b); + checkItemHasBeenNotProcessed(item1_1_2_a); + checkItemHasBeenNotProcessed(item1_1_2_b); + checkItemHasBeenNotProcessed(item1_2_1_a); + checkItemHasBeenNotProcessed(item1_2_1_b); + checkItemHasBeenNotProcessed(item1_2_2_a); + checkItemHasBeenNotProcessed(item1_2_2_b); + // process a more structured community and verify that all the items at all levels are processed + performMediaFilterScript(topComm1); + // items that were already processed should stay processed + checkItemHasBeenProcessed(item1_1_a); + checkItemHasBeenProcessed(item1_1_1_a); + checkItemHasBeenProcessed(item1_1_1_b); + // residual items should have been processed as well now + checkItemHasBeenProcessed(item1_1_b); + checkItemHasBeenProcessed(item1_2_a); + checkItemHasBeenProcessed(item1_2_b); + checkItemHasBeenProcessed(item1_1_2_a); + 
checkItemHasBeenProcessed(item1_1_2_b); + checkItemHasBeenProcessed(item1_2_1_a); + checkItemHasBeenProcessed(item1_2_1_b); + checkItemHasBeenProcessed(item1_2_2_a); + checkItemHasBeenProcessed(item1_2_2_b); + } + + private void checkItemHasBeenNotProcessed(Item item) throws IOException, SQLException, AuthorizeException { + List textBundles = item.getBundles("TEXT"); + assertTrue("The item " + item.getName() + " should NOT have the TEXT bundle", textBundles.size() == 0); + } + + private void checkItemHasBeenProcessed(Item item) throws IOException, SQLException, AuthorizeException { + String expectedFileName = StringUtils.endsWith(item.getName(), "_a") ? "test.csv.txt" : "test.txt.txt"; + String expectedContent = StringUtils.endsWith(item.getName(), "_a") ? "data3,3" : "quick brown fox"; + List textBundles = item.getBundles("TEXT"); + assertTrue("The item " + item.getName() + " has NOT the TEXT bundle", textBundles.size() == 1); + List bitstreams = textBundles.get(0).getBitstreams(); + assertTrue("The item " + item.getName() + " has NOT exactly 1 bitstream in the TEXT bundle", + bitstreams.size() == 1); + assertTrue("The text bistream in the " + item.getName() + " is NOT named properly [" + expectedFileName + "]", + StringUtils.equals(bitstreams.get(0).getName(), expectedFileName)); + assertTrue("The text bistream in the " + item.getName() + " doesn't contain the proper content [" + + expectedContent + "]", StringUtils.contains(getContent(bitstreams.get(0)), expectedContent)); + } + + private CharSequence getContent(Bitstream bitstream) throws IOException, SQLException, AuthorizeException { + try (InputStream input = bitstreamService.retrieve(context, bitstream)) { + return IOUtils.toString(input, "UTF-8"); + } + } + + private void performMediaFilterScript(DSpaceObject dso) throws Exception { + if (dso != null) { + runDSpaceScript("filter-media", "-i", dso.getHandle()); + } else { + runDSpaceScript("filter-media"); + } + // reload our items to see the changes + item1_1_a = context.reloadEntity(item1_1_a); + item1_1_b = context.reloadEntity(item1_1_b); + item1_2_a = context.reloadEntity(item1_2_a); + item1_2_b = context.reloadEntity(item1_2_b); + item1_1_1_a = context.reloadEntity(item1_1_1_a); + item1_1_1_b = context.reloadEntity(item1_1_1_b); + item1_1_2_a = context.reloadEntity(item1_1_2_a); + item1_1_2_b = context.reloadEntity(item1_1_2_b); + item1_2_1_a = context.reloadEntity(item1_2_1_a); + item1_2_1_b = context.reloadEntity(item1_2_1_b); + item1_2_2_a = context.reloadEntity(item1_2_2_a); + item1_2_2_b = context.reloadEntity(item1_2_2_b); + item2_1_a = context.reloadEntity(item2_1_a); + item2_1_b = context.reloadEntity(item2_1_b); + + } +} diff --git a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java index a4775c4b8358..dbcf3a7972b7 100644 --- a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java @@ -20,6 +20,7 @@ import org.dspace.content.MetadataField; import org.dspace.content.MetadataValue; import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.DSpaceObjectService; import org.dspace.content.service.MetadataValueService; import org.dspace.core.Constants; @@ -167,6 +168,19 @@ public BitstreamBuilder withMimeType(String mimeType) throws SQLException { return this; } + /** + * Guess the bitstream format as during the 
submission via the + * {@link BitstreamFormatService#guessFormat(Context, Bitstream)} + * + * @return the BitstreamBuilder with the format set according to + * {@link BitstreamFormatService#guessFormat(Context, Bitstream)} + * @throws SQLException + */ + public BitstreamBuilder guessFormat() throws SQLException { + bitstream.setFormat(context, bitstreamFormatService.guessFormat(context, bitstream)); + return this; + } + public BitstreamBuilder withFormat(String format) throws SQLException { bitstreamService.addMetadata(context, bitstream, "dc", "format", null, null, format); From c706135fa22b97a554c0b4ef7aea60a8db8138d1 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Sat, 20 Jan 2024 17:51:33 +0100 Subject: [PATCH 665/686] DSC-1496 fix lazy initialization traversing comms/colls --- .../app/mediafilter/MediaFilterServiceImpl.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java index 3f898bd7d9f1..974dc784bd4f 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java @@ -132,12 +132,18 @@ public void applyFiltersAllItems(Context context) throws Exception { @Override public void applyFiltersCommunity(Context context, Community community) throws Exception { //only apply filters if community not in skip-list + // ensure that the community is attached to the current hibernate session + // as we are committing after each item (handles, sub-communties and + // collections are lazy attributes) + community = context.reloadEntity(community); if (!inSkipList(community.getHandle())) { List subcommunities = community.getSubcommunities(); for (Community subcommunity : subcommunities) { applyFiltersCommunity(context, subcommunity); } - + // ensure that the community is attached to the current hibernate session + // as we are committing after each item + community = context.reloadEntity(community); List collections = community.getCollections(); for (Collection collection : collections) { applyFiltersCollection(context, collection); @@ -148,6 +154,9 @@ public void applyFiltersCommunity(Context context, Community community) @Override public void applyFiltersCollection(Context context, Collection collection) throws Exception { + // ensure that the collection is attached to the current hibernate session + // as we are committing after each item (handles are lazy attributes) + collection = context.reloadEntity(collection); //only apply filters if collection not in skip-list if (!inSkipList(collection.getHandle())) { Iterator itemIterator = itemService.findAllByCollection(context, collection); From 1f1f6f4e84c51373b910b07f7c30fd7c82ff2cf9 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Tue, 23 Jan 2024 12:19:13 +0100 Subject: [PATCH 666/686] [DSC-963] Addresses changes of the PR#8797 --- dspace-server-webapp/pom.xml | 13 +- .../GenericAuthorizationFeatureIT.java | 547 +++++------------- .../resources/application-test.properties | 1 + dspace/modules/server-boot/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- 5 files changed, 171 insertions(+), 394 deletions(-) diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index fe47ad87ef3c..0a0b394d576a 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -76,10 +76,21 @@ ${basedir}/src/main/resources - 
**/*.properties + **/*application*.properties + **/*dspace*.properties true + + ${basedir}/src/main/resources + + **/*application*.properties + **/*dspace*.properties + + + **/*.properties + + ${basedir}/src/main/resources diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java index 7872067a31e2..0ac7eea4250d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java @@ -37,6 +37,7 @@ import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.web.servlet.ResultActions; /** * Test for the following authorization features: @@ -52,6 +53,8 @@ */ public class GenericAuthorizationFeatureIT extends AbstractControllerIntegrationTest { + private static final int SIZE = 100; + @Autowired ConfigurationService configurationService; @@ -208,215 +211,163 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception { String siteId = ContentServiceFactory.getInstance().getSiteService().findSite(context).getID().toString(); // Verify the general admin has this feature on the site - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/sites/" + siteId)) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/sites/" + siteId) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on the site - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/sites/" + siteId)) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/sites/" + siteId) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on community A - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(adminToken,"http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community A - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community AA - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAAdminToken, 
"http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin doesn’t have this feature on community A - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A admin doesn’t have this feature on community B - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityB.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityB.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on collection X - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on collection X - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on collection X - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on collection X - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X admin doesn’t have this feature on collection Y - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionY.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + 
collectionY.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on item 1 - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on item 2 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken).perform( - 
get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bundle in item 2 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bitstream in item 1 - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bitstream in item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bitstream in item 1 - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bitstream in item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bitstream in item 2 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + 
"http://localhost/api/core/bitstreams/" + bitstream2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bitstreams/" + bitstream2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -429,41 +380,31 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception { String item1AdminToken = getAuthToken(item1Admin.getEmail(), password); // Verify the general admin has this feature on item 1 - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on item 2 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -478,73 +419,55 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify community A write has this feature on community A if the boolean parameter is true // (or doesn’t have access otherwise) if (hasDSOAccess) { - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { - getClient(communityAWriterToken).perform( - 
get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); } // Verify community A write doesn’t have this feature on community AA - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A write doesn’t have this feature on collection X - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A write doesn’t have this feature on item 1 - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A write doesn’t have this feature on the bundle in item 1 - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A write doesn’t have this feature on the bitstream in item 1 - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on community A - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on community AA - getClient(collectionXWriterToken).perform( 
- get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -552,65 +475,49 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify collection X write has this feature on collection X if the boolean parameter is true // (or doesn’t have access otherwise) if (hasDSOAccess) { - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); } // Verify collection X write doesn’t have this feature on item 1 - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on the bundle in item 1 - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on the bitstream in item 1 - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on community A - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + 
"')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on community AA - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on collection X - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -618,57 +525,43 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA // Verify item 1 write has this feature on item 1 if the boolean parameter is true // (or doesn’t have access otherwise) if (hasDSOAccess) { - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); } // Verify item 1 write doesn’t have this feature on the bundle in item 1 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on the bitstream in item 1 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A write doesn’t have this feature on community B - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityB.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/communities/" + communityB.getID()) .andExpect(status().isOk()) 
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on collection Y - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionY.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/collections/" + collectionY.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on item 2 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -680,17 +573,13 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc String item1WriterToken = getAuthToken(item1Writer.getEmail(), password); // Verify community A write doesn’t have this feature on item 1 - getClient(communityAWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on item 1 - getClient(collectionXWriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -698,25 +587,19 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc // Verify item 1 write has this feature on item 1 if the boolean parameter is true // (or doesn’t have access otherwise) if (hasDSOAccess) { - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } else { - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); } // Verify item 1 write doesn’t have this feature on item 2 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) 
+ getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -754,41 +637,31 @@ public void testCanMoveAdmin() throws Exception { final String feature = "canMove"; // Verify the general admin has this feature on item 1 - getClient(adminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on item 1 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 - getClient(collectionXAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on item 1 - getClient(item1AdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A admin doesn’t have this feature on item 2 - getClient(communityAAdminToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -805,9 +678,7 @@ public void testCanMoveAdmin() throws Exception { context.restoreAuthSystemState(); // verify item 1 write has this feature on item 1 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]") .exists()); @@ -828,9 +699,7 @@ public void testCanMoveWriter() throws Exception { String item1WriterToken = getAuthToken(item1Writer.getEmail(), password); // verify item 1 write has this feature on item 1 - getClient(item1WriterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + 
"http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]") .exists()); @@ -866,31 +735,25 @@ public void testCanDeleteAdmin() throws Exception { final String feature = "canDelete"; // Verify the general admin doesn’t have this feature on the site - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/sites/" + siteId)) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/sites/" + siteId) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on community A - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community A - getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on community AA - getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -909,174 +772,139 @@ public void testCanDeleteAdmin() throws Exception { .build(); context.restoreAuthSystemState(); String communityAAAdminToken = getAuthToken(communityAAAdmin.getEmail(), password); - getClient(communityAAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAAAdminToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X admin doesn’t have this feature on community A - getClient(collectionXAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify community A admin doesn’t have this feature on community B - 
getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityB.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/communities/" + communityB.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on collection X - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on collection X - getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin doesn’t have this feature on collection X - getClient(collectionXAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 admin doesn’t have this feature on collection X - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X admin doesn’t have this feature on collection Y - getClient(collectionXAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionY.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + collectionY.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on item 1 - getClient(communityAAdminToken) - 
.perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken,"http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on item 1 - getClient(collectionXAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken,"http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 admin doesn’t have this feature on item 2 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) 
.andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bundle in item 2 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify the general admin has this feature on the bitstream in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bitstream in item 1 - getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bitstream in item 1 - getClient(collectionXAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bitstream in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin doesn’t have this feature on the bitstream in item 2 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream2.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bitstreams/" + bitstream2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1103,9 +931,7 @@ public void testCanDeleteAdminParent() throws Exception { context.restoreAuthSystemState(); String communityAAAdminToken = getAuthToken(communityAAAdmin.getEmail(), password); //verify the community AA admin has this feature on community AA - getClient(communityAAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + 
communityAA.getID())) + getAuthorizationFeatures(communityAAAdminToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -1119,9 +945,7 @@ public void testCanDeleteAdminParent() throws Exception { .build(); context.restoreAuthSystemState(); // verify collection X admin has this feature on collection X - getClient(collectionXAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -1135,8 +959,7 @@ public void testCanDeleteAdminParent() throws Exception { .build(); context.restoreAuthSystemState(); // verify item 1 admin has this feature on item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -1166,16 +989,12 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String communityADeleterToken = getAuthToken(communityADeleter.getEmail(), password); // Verify the user has this feature on community A - getClient(communityADeleterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityADeleterToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on community AA - getClient(communityADeleterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityADeleterToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1196,23 +1015,17 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String communityARemoverToken = getAuthToken(communityARemover.getEmail(), password); // Verify the user has this feature on community AA - getClient(communityARemoverToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityARemoverToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on community A - getClient(communityARemoverToken) - 
.perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityA.getID())) + getAuthorizationFeatures(communityARemoverToken, "http://localhost/api/core/communities/" + communityA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on collection X - getClient(communityARemoverToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityARemoverToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1232,23 +1045,17 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String communityAARemoverToken = getAuthToken(communityAARemover.getEmail(), password); // Verify the user has this feature on collection X - getClient(communityAARemoverToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(communityAARemoverToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on community AA - getClient(communityAARemoverToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/communities/" + communityAA.getID())) + getAuthorizationFeatures(communityAARemoverToken, "http://localhost/api/core/communities/" + communityAA.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on item 1 - getClient(communityAARemoverToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAARemoverToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1268,9 +1075,7 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String collectionXRemoverToken = getAuthToken(collectionXRemover.getEmail(), password); // Verify the user doesn’t have this feature on item 1 - getClient(collectionXRemoverToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXRemoverToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1290,8 +1095,7 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String item1DeleterToken = getAuthToken(item1Deleter.getEmail(), password); // Verify the user doesn’t have this feature on item 1 
- getClient(item1DeleterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1DeleterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1316,23 +1120,17 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String collectionXRemoverItem1DeleterToken = getAuthToken(collectionXRemoverItem1Deleter.getEmail(), password); // Verify the user has this feature on item 1 - getClient(collectionXRemoverItem1DeleterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXRemoverItem1DeleterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on collection X - getClient(collectionXRemoverItem1DeleterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/collections/" + collectionX.getID())) + getAuthorizationFeatures(collectionXRemoverItem1DeleterToken, "http://localhost/api/core/collections/" + collectionX.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on the bundle in item 1 - getClient(collectionXRemoverItem1DeleterToken).perform( - get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXRemoverItem1DeleterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1352,20 +1150,17 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String item1RemoverToken = getAuthToken(item1Remover.getEmail(), password); // Verify the user has this feature on the bundle in item 1 - getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1RemoverToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify this user doesn’t have this feature on item 1 - getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1RemoverToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify this user doesn’t have this feature on the bitstream in item 1 - getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + 
"http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(item1RemoverToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1385,9 +1180,7 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String bundle1RemoverToken = getAuthToken(bundle1Remover.getEmail(), password); // Verify the user doesn’t have this feature on the bitstream in item 1 - getClient(bundle1RemoverToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(bundle1RemoverToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1413,9 +1206,7 @@ public void testCanDeleteMinimalPermissions() throws Exception { context.restoreAuthSystemState(); String bundle1item1RemoverToken = getAuthToken(bundle1item1Remover.getEmail(), password); // Verify the user has this feature on the bitstream in item 1 - getClient(bundle1item1RemoverToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bitstreams/" + bitstream1.getID())) + getAuthorizationFeatures(bundle1item1RemoverToken, "http://localhost/api/core/bitstreams/" + bitstream1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1430,39 +1221,31 @@ public void testCanReorderBitstreamsAdmin() throws Exception { final String feature = "canReorderBitstreams"; // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + 
bundle1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on the bundle in item 2 - getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle2.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1476,31 +1259,24 @@ public void testCanReorderBitstreamsWriter() throws Exception { final String feature = "canReorderBitstreams"; // Verify community A write doesn’t have this feature on the bundle in item 1 - getClient(communityAWriterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on the bundle in item 1 - getClient(collectionXWriterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on the bundle in item 1 - getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Create a new user, grant WRITE permissions on the bundle in item 1 to this user // Verify the user has this feature on the bundle in item 1 - getClient(communityAWriterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1515,39 +1291,31 @@ public void testCanCreateBitstreamAdmin() throws Exception { final String feature = "canCreateBitstream"; // Verify the general admin has this feature on the bundle in item 1 - getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(adminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + 
"')]").exists()); // Verify community A admin has this feature on the bundle in item 1 - getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify collection X admin has this feature on the bundle in item 1 - getClient(collectionXAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXAdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify item 1 admin has this feature on the bundle in item 1 - getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1AdminToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); // Verify community A admin doesn’t have this feature on the bundle in item 2 - getClient(communityAAdminToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle2.getID())) + getAuthorizationFeatures(communityAAdminToken, "http://localhost/api/core/bundles/" + bundle2.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1561,24 +1329,19 @@ public void testCanCreateBitstreamWriter() throws Exception { final String feature = "canCreateBitstream"; // Verify community A write doesn’t have this feature on the bundle in item 1 - getClient(communityAWriterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on the bundle in item 1 - getClient(collectionXWriterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on the bundle in item 1 - getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(item1WriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) 
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1598,8 +1361,7 @@ public void testCanCreateBitstreamWriter() throws Exception { context.restoreAuthSystemState(); String bundle1WriterToken = getAuthToken(bundle1Writer.getEmail(), password); // Verify the user doesn’t have this feature on the bundle in item 1 - getClient(bundle1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(bundle1WriterToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1619,8 +1381,7 @@ public void testCanCreateBitstreamWriter() throws Exception { context.restoreAuthSystemState(); String bundle1AdderToken = getAuthToken(bundle1Adder.getEmail(), password); // Verify the user doesn’t have this feature on the bundle in item 1 - getClient(bundle1AdderToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(bundle1AdderToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1656,9 +1417,7 @@ public void testCanCreateBitstreamWriter() throws Exception { context.restoreAuthSystemState(); String bundle1WriterAdderToken = getAuthToken(bundle1WriterAdder.getEmail(), password); // Verify the user has this feature on the bundle in item 1 - getClient(bundle1WriterAdderToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/bundles/" + bundle1.getID())) + getAuthorizationFeatures(bundle1WriterAdderToken, "http://localhost/api/core/bundles/" + bundle1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); @@ -1677,24 +1436,19 @@ public void testCanCreateBundleWriter() throws Exception { final String feature = "canCreateBundle"; // Verify community A write doesn’t have this feature on item 1 - getClient(communityAWriterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(communityAWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify collection X write doesn’t have this feature on item 1 - getClient(collectionXWriterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(collectionXWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); // Verify item 1 write doesn’t have this feature on item 1 - getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1WriterToken, 
"http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").doesNotExist()); @@ -1719,11 +1473,22 @@ public void testCanCreateBundleWriter() throws Exception { context.restoreAuthSystemState(); String item1AdderWriterToken = getAuthToken(item1AdderWriter.getEmail(), password); // Verify the user has this feature on item 1 - getClient(item1AdderWriterToken) - .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri=" - + "http://localhost/api/core/items/" + item1.getID())) + getAuthorizationFeatures(item1AdderWriterToken, "http://localhost/api/core/items/" + item1.getID()) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='" + feature + "')]").exists()); } + + private ResultActions getAuthorizationFeatures(String adminToken, String uri) throws Exception { + return getAuthorizationFeatures(adminToken, uri, SIZE); + } + + private ResultActions getAuthorizationFeatures(String adminToken, String uri, int size) throws Exception { + return getClient(adminToken) + .perform( + get( + "/api/authz/authorizations/search/object?size=" + size + "&embed=feature&uri=" + uri + ) + ); + } } \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/application-test.properties b/dspace-server-webapp/src/test/resources/application-test.properties index e92e1166e355..bd9e2ea4a17b 100644 --- a/dspace-server-webapp/src/test/resources/application-test.properties +++ b/dspace-server-webapp/src/test/resources/application-test.properties @@ -16,4 +16,5 @@ ## This file is found on classpath at src/test/resources/log4j2-test.xml logging.config = classpath:log4j2-test.xml +# Our integration tests expect application to be deployed at the root path (/) server.servlet.context-path=/ \ No newline at end of file diff --git a/dspace/modules/server-boot/pom.xml b/dspace/modules/server-boot/pom.xml index 96cad508db79..ee23c8ee7bc7 100644 --- a/dspace/modules/server-boot/pom.xml +++ b/dspace/modules/server-boot/pom.xml @@ -2,7 +2,7 @@ 4.0.0 org.dspace server-boot - DSpace Server Webapp:: Boot + DSpace Server Webapp:: Executable JAR + + From 9cae03593a0918d599ede39f27582c9df1f9ad11 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Thu, 25 Jan 2024 11:09:34 +0100 Subject: [PATCH 668/686] [DSC-1503] Fix checkstyle --- .../main/java/org/dspace/content/logic/InCollectionFilter.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/logic/InCollectionFilter.java b/dspace-api/src/main/java/org/dspace/content/logic/InCollectionFilter.java index 6fdc53460f82..c7697ce82fa1 100644 --- a/dspace-api/src/main/java/org/dspace/content/logic/InCollectionFilter.java +++ b/dspace-api/src/main/java/org/dspace/content/logic/InCollectionFilter.java @@ -17,7 +17,6 @@ import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; -import org.dspace.content.logic.LogicalStatementException; import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.core.Context; @@ -43,7 +42,7 @@ public class InCollectionFilter implements Filter { private String name; private Map parameters = new HashMap<>(); private static Logger log = LogManager.getLogger(InCollectionFilter.class); - + /** * Get parameters set by spring configuration in item-filters.xml * These could be any kind of map 
that the extending condition class needs for evaluation From 9d5fa139452b78b33c9858111a30e68dca85ff9b Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Sun, 28 Jan 2024 18:04:05 +0100 Subject: [PATCH 669/686] DSC-1515 allows to configure a sync ExecutorService mainly for testing purpose --- .../src/test/data/dspaceFolder/config/local.cfg | 5 ++++- dspace-services/pom.xml | 12 ++++++++++++ .../dspace/services/events/SystemEventService.java | 7 ++++++- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 3618d405f7f2..2f3a64218dbf 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -223,4 +223,7 @@ orcid.synchronization-enabled = true # These settings ensure "dspace.object.owner" field are indexed by Authority Control choices.plugin.dspace.object.owner = EPersonAuthority choices.presentation.dspace.object.owner = suggest -authority.controlled.dspace.object.owner = true \ No newline at end of file +authority.controlled.dspace.object.owner = true + +# force the event system to work synchronously during test +system-event.thread.size = 0 \ No newline at end of file diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 867ac1dc1a7e..39e2ccf36f38 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -154,5 +154,17 @@ ${spring-boot.version} + + com.google.guava + guava + + + + org.checkerframework + checker-qual + + + + diff --git a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java index 5a4f118fcdc3..322b0dc8be20 100644 --- a/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java +++ b/dspace-services/src/main/java/org/dspace/services/events/SystemEventService.java @@ -15,6 +15,7 @@ import java.util.function.Supplier; import javax.annotation.PreDestroy; +import com.google.common.util.concurrent.MoreExecutors; import org.apache.commons.lang3.ArrayUtils; import org.dspace.services.ConfigurationService; import org.dspace.services.EventService; @@ -107,7 +108,11 @@ private void initExecutor() { } ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); int threadSize = configurationService.getIntProperty("system-event.thread.size", DEFAULT_THREAD_SIZE); - this.executorService = Executors.newFixedThreadPool(threadSize); + if (threadSize == 0) { + this.executorService = MoreExecutors.newDirectExecutorService(); + } else { + this.executorService = Executors.newFixedThreadPool(threadSize); + } } /* (non-Javadoc) From 7b466631b3b8620bec8721d3b7de9f85a79401d6 Mon Sep 17 00:00:00 2001 From: mohamed eskander Date: Mon, 29 Jan 2024 12:59:52 +0200 Subject: [PATCH 670/686] [DSC-1377] added support for alternative tab/box for tabpolicy/boxpolicy sheets of layout config --- .../java/org/dspace/layout/CrisLayoutBox.java | 29 +-- .../layout/CrisLayoutBox2SecurityGroup.java | 124 ++++++++++ .../java/org/dspace/layout/CrisLayoutTab.java | 29 +-- .../layout/CrisLayoutTab2SecurityGroup.java | 124 ++++++++++ .../service/CrisLayoutToolValidator.java | 2 + .../impl/CrisLayoutToolConverterImpl.java | 32 ++- .../impl/CrisLayoutToolParserImpl.java | 127 ++++++++++- .../service/CrisLayoutTabAccessService.java | 3 +- .../layout/service/CrisLayoutTabService.java | 2 + 
.../impl/CrisLayoutTabAccessServiceImpl.java | 16 +- .../impl/CrisLayoutTabServiceImpl.java | 9 + .../cris-layout-configuration-template.xls | Bin 686080 -> 686080 bytes ...ab_id_to_cris_layout_tab2securitygroup.sql | 14 ++ ...ox_id_to_cris_layout_box2securitygroup.sql | 14 ++ ...ab_id_to_cris_layout_tab2securitygroup.sql | 14 ++ ...ox_id_to_cris_layout_box2securitygroup.sql | 14 ++ ...ab_id_to_cris_layout_tab2securitygroup.sql | 14 ++ ...ox_id_to_cris_layout_box2securitygroup.sql | 14 ++ .../dspace/builder/CrisLayoutBoxBuilder.java | 13 ++ .../dspace/builder/CrisLayoutTabBuilder.java | 13 ++ .../converter/CrisLayoutTabConverter.java | 89 ++++++-- .../org/dspace/app/rest/LayoutSecurityIT.java | 26 ++- .../layout/CrisLayoutTabRestRepositoryIT.java | 211 ++++++++++++++++++ dspace/config/hibernate.cfg.xml | 2 + .../conftool/cris-layout-configuration.xls | Bin 126464 -> 126464 bytes 25 files changed, 852 insertions(+), 83 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox2SecurityGroup.java create mode 100644 dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab2SecurityGroup.java create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql diff --git a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox.java index 47b426b135c1..9fb9a725c5b7 100644 --- a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox.java +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox.java @@ -11,6 +11,7 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.stream.Collectors; import javax.persistence.Cacheable; import javax.persistence.CascadeType; import javax.persistence.Column; @@ -79,13 +80,8 @@ public class CrisLayoutBox implements ReloadableEntity { ) private Set metadataSecurityFields = new HashSet<>(); - @ManyToMany(fetch = FetchType.LAZY) - @JoinTable( - name = "cris_layout_box2securitygroup", - joinColumns = {@JoinColumn(name = "box_id")}, - inverseJoinColumns = {@JoinColumn(name = "group_id")} - ) - private Set groupSecurityFields = new HashSet<>(); + @OneToMany(fetch = FetchType.LAZY, mappedBy = "box", cascade = CascadeType.ALL, orphanRemoval = true) + private Set box2SecurityGroups = new HashSet<>(); @OneToMany(fetch = FetchType.LAZY, mappedBy = "box", cascade = CascadeType.ALL) @OrderBy(value = "row, cell, priority") @@ -288,20 +284,19 @@ public void setContainer(Boolean container) { this.container = container; } - public void setGroupSecurityFields(Set groupSecurityFields) { - 
this.groupSecurityFields = groupSecurityFields; - } - - public void addGroupSecurityFields(Set groupSecurityFields) { - this.groupSecurityFields.addAll(groupSecurityFields); + public Set getGroupSecurityFields() { + return box2SecurityGroups.stream() + .map(crisLayoutBox2SecurityGroup -> + crisLayoutBox2SecurityGroup.getGroup()) + .collect(Collectors.toSet()); } - public void addGroupSecurityFields(Group group) { - this.groupSecurityFields.add(group); + public Set getBox2SecurityGroups() { + return box2SecurityGroups; } - public Set getGroupSecurityFields() { - return groupSecurityFields; + public void setBox2SecurityGroups(Set box2SecurityGroups) { + this.box2SecurityGroups = box2SecurityGroups; } @Override diff --git a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox2SecurityGroup.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox2SecurityGroup.java new file mode 100644 index 000000000000..d0ee1cd58415 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutBox2SecurityGroup.java @@ -0,0 +1,124 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.layout; + +import java.io.Serializable; +import javax.persistence.CascadeType; +import javax.persistence.Embeddable; +import javax.persistence.EmbeddedId; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.MapsId; +import javax.persistence.Table; + +import org.dspace.eperson.Group; + +@Entity +@Table(name = "cris_layout_box2securitygroup") +public class CrisLayoutBox2SecurityGroup implements Serializable { + + @Embeddable + public static class CrisLayoutBox2SecurityGroupId implements Serializable { + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "box_id") + private CrisLayoutBox boxId; + + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "group_id") + private Group groupId; + + public CrisLayoutBox2SecurityGroupId() { + + } + + public CrisLayoutBox2SecurityGroupId(CrisLayoutBox boxId, Group groupId) { + this.boxId = boxId; + this.groupId = groupId; + } + + public CrisLayoutBox getBoxId() { + return boxId; + } + + public void setBoxId(CrisLayoutBox boxId) { + this.boxId = boxId; + } + + public Group getGroupId() { + return groupId; + } + + public void setGroupId(Group groupId) { + this.groupId = groupId; + } + } + + @EmbeddedId + private CrisLayoutBox2SecurityGroupId id; + + @ManyToOne(fetch = FetchType.LAZY) + @MapsId("boxId") + @JoinColumn(name = "box_id", insertable = false, updatable = false) + private CrisLayoutBox box; + + @ManyToOne(fetch = FetchType.LAZY) + @MapsId("groupId") + @JoinColumn(name = "group_id", insertable = false, updatable = false) + private Group group; + + @ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL) + @JoinColumn(name = "alternative_box_id", nullable = true) + private CrisLayoutBox alternativeBox; + + public CrisLayoutBox2SecurityGroup() { + + } + + public CrisLayoutBox2SecurityGroup(CrisLayoutBox2SecurityGroupId id, + CrisLayoutBox box, Group group, + CrisLayoutBox alternativeBox) { + this.id = id; + this.box = box; + this.group = group; + this.alternativeBox = alternativeBox; + } + + public CrisLayoutBox2SecurityGroupId getId() { + return id; + } + + public void setId(CrisLayoutBox2SecurityGroupId id) { + this.id = id; + } + + public 
CrisLayoutBox getBox() { + return box; + } + + public void setBox(CrisLayoutBox box) { + this.box = box; + } + + public Group getGroup() { + return group; + } + + public void setGroup(Group group) { + this.group = group; + } + + public CrisLayoutBox getAlternativeBox() { + return alternativeBox; + } + + public void setAlternativeBox(CrisLayoutBox alternativeBox) { + this.alternativeBox = alternativeBox; + } +} diff --git a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java index 48bd0dc56112..9c0f4ef1e2b9 100644 --- a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab.java @@ -87,14 +87,8 @@ public class CrisLayoutTab implements ReloadableEntity { @JoinColumn(name = "tab_id") }, inverseJoinColumns = { @JoinColumn(name = "metadata_field_id") }) private Set metadataSecurityFields = new HashSet<>(); - @ManyToMany(fetch = FetchType.LAZY) - @JoinTable( - name = "cris_layout_tab2securitygroup", - joinColumns = {@JoinColumn(name = "tab_id")}, - inverseJoinColumns = {@JoinColumn(name = "group_id")} - ) - private Set groupSecurityFields = new HashSet<>(); - + @OneToMany(fetch = FetchType.LAZY, mappedBy = "tab", cascade = CascadeType.ALL) + private Set tab2SecurityGroups = new HashSet<>(); @Column(name = "is_leading") private Boolean leading; @@ -230,20 +224,19 @@ public List getBoxes() { .collect(Collectors.toList()); } - public void setGroupSecurityFields(Set groupSecurityFields) { - this.groupSecurityFields = groupSecurityFields; - } - - public void addGroupSecurityFields(Set groupSecurityFields) { - this.groupSecurityFields.addAll(groupSecurityFields); + public Set getGroupSecurityFields() { + return tab2SecurityGroups.stream() + .map(crisLayoutTab2SecurityGroup -> + crisLayoutTab2SecurityGroup.getGroup()) + .collect(Collectors.toSet()); } - public void addGroupSecurityFields(Group group) { - this.groupSecurityFields.add(group); + public Set getTab2SecurityGroups() { + return tab2SecurityGroups; } - public Set getGroupSecurityFields() { - return groupSecurityFields; + public void setTab2SecurityGroups(Set tab2SecurityGroups) { + this.tab2SecurityGroups = tab2SecurityGroups; } @Override diff --git a/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab2SecurityGroup.java b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab2SecurityGroup.java new file mode 100644 index 000000000000..f41b3ec53e88 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/layout/CrisLayoutTab2SecurityGroup.java @@ -0,0 +1,124 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.layout; + +import java.io.Serializable; +import javax.persistence.CascadeType; +import javax.persistence.Embeddable; +import javax.persistence.EmbeddedId; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.MapsId; +import javax.persistence.Table; + +import org.dspace.eperson.Group; + +@Entity +@Table(name = "cris_layout_tab2securitygroup") +public class CrisLayoutTab2SecurityGroup implements Serializable { + + @Embeddable + public static class CrisLayoutTab2SecurityGroupId implements Serializable { + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "tab_id") + private 
CrisLayoutTab tabId; + + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "group_id") + private Group groupId; + + public CrisLayoutTab2SecurityGroupId() { + + } + + public CrisLayoutTab2SecurityGroupId(CrisLayoutTab tabId, Group groupId) { + this.tabId = tabId; + this.groupId = groupId; + } + + public CrisLayoutTab getTabId() { + return tabId; + } + + public void setTabId(CrisLayoutTab tabId) { + this.tabId = tabId; + } + + public Group getGroupId() { + return groupId; + } + + public void setGroupId(Group groupId) { + this.groupId = groupId; + } + } + + @EmbeddedId + private CrisLayoutTab2SecurityGroupId id; + + @ManyToOne(fetch = FetchType.LAZY) + @MapsId("tabId") + @JoinColumn(name = "tab_id", insertable = false, updatable = false) + private CrisLayoutTab tab; + + @ManyToOne(fetch = FetchType.LAZY) + @MapsId("groupId") + @JoinColumn(name = "group_id", insertable = false, updatable = false) + private Group group; + + @ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL) + @JoinColumn(name = "alternative_tab_id") + private CrisLayoutTab alternativeTab; + + public CrisLayoutTab2SecurityGroup() { + + } + + public CrisLayoutTab2SecurityGroup(CrisLayoutTab2SecurityGroupId id, + CrisLayoutTab tab, Group group, + CrisLayoutTab alternativeTab) { + this.id = id; + this.tab = tab; + this.group = group; + this.alternativeTab = alternativeTab; + } + + public CrisLayoutTab2SecurityGroupId getId() { + return id; + } + + public void setId(CrisLayoutTab2SecurityGroupId id) { + this.id = id; + } + + public CrisLayoutTab getTab() { + return tab; + } + + public void setTab(CrisLayoutTab tab) { + this.tab = tab; + } + + public Group getGroup() { + return group; + } + + public void setGroup(Group group) { + this.group = group; + } + + public CrisLayoutTab getAlternativeTab() { + return alternativeTab; + } + + public void setAlternativeTab(CrisLayoutTab alternativeTab) { + this.alternativeTab = alternativeTab; + } +} diff --git a/dspace-api/src/main/java/org/dspace/layout/script/service/CrisLayoutToolValidator.java b/dspace-api/src/main/java/org/dspace/layout/script/service/CrisLayoutToolValidator.java index c1a9cff5dbb4..74302960cff5 100644 --- a/dspace-api/src/main/java/org/dspace/layout/script/service/CrisLayoutToolValidator.java +++ b/dspace-api/src/main/java/org/dspace/layout/script/service/CrisLayoutToolValidator.java @@ -108,6 +108,8 @@ public interface CrisLayoutToolValidator { String GROUP_COLUMN = "GROUP"; + String ALTERNATIVE_TO_COLUMN = "ALTERNATIVE_TO"; + String METADATA_TYPE = "METADATA"; String BITSTREAM_TYPE = "BITSTREAM"; diff --git a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolConverterImpl.java b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolConverterImpl.java index 1aec1c349372..52ba3ddedc16 100644 --- a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolConverterImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolConverterImpl.java @@ -31,13 +31,14 @@ import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.ss.usermodel.WorkbookFactory; import org.dspace.content.MetadataField; -import org.dspace.eperson.Group; import org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.CrisLayoutCell; import org.dspace.layout.CrisLayoutField; import org.dspace.layout.CrisLayoutFieldBitstream; import org.dspace.layout.CrisLayoutMetric2Box; import org.dspace.layout.CrisLayoutTab; +import 
org.dspace.layout.CrisLayoutTab2SecurityGroup; import org.dspace.layout.CrisMetadataGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.layout.script.service.CrisLayoutToolConverter; @@ -247,9 +248,9 @@ private void buildTabPolicy(Workbook workbook, CrisLayoutTab tab) { buildTabPolicyMetadataSecurityFieldRow(sheet, tab, metadataField) ); - tab.getGroupSecurityFields() - .forEach(group -> - buildTabPolicyGroupSecurityFieldRow(sheet, tab, group) + tab.getTab2SecurityGroups() + .forEach(tab2SecurityGroup -> + buildTabPolicyGroupSecurityFieldRow(sheet, tab, tab2SecurityGroup) ); } @@ -259,14 +260,18 @@ private void buildTabPolicyMetadataSecurityFieldRow(Sheet sheet, CrisLayoutTab t createCell(row, 1, tab.getShortName()); createCell(row, 2, metadataField.toString('.')); createCell(row, 3, ""); + createCell(row, 4, ""); } - private void buildTabPolicyGroupSecurityFieldRow(Sheet sheet, CrisLayoutTab tab, Group group) { + private void buildTabPolicyGroupSecurityFieldRow(Sheet sheet, CrisLayoutTab tab, + CrisLayoutTab2SecurityGroup tab2SecurityGroup) { + CrisLayoutTab alternativeTab = tab2SecurityGroup.getAlternativeTab(); Row row = sheet.createRow(sheet.getLastRowNum() + 1); createCell(row, 0, tab.getEntity().getLabel()); createCell(row, 1, tab.getShortName()); createCell(row, 2, ""); - createCell(row, 3, group.getName()); + createCell(row, 3, tab2SecurityGroup.getGroup().getName()); + createCell(row, 4, alternativeTab == null ? "" : alternativeTab.getShortName()); } private void buildBoxPolicy(Workbook workbook, List boxes) { @@ -277,9 +282,9 @@ private void buildBoxPolicy(Workbook workbook, List boxes) { buildBoxPolicyMetadataSecurityFieldRow(sheet, box, metadataField) ); - box.getGroupSecurityFields() - .forEach(group -> - buildBoxPolicyGroupSecurityFieldRow(sheet, box, group) + box.getBox2SecurityGroups() + .forEach(box2SecurityGroup -> + buildBoxPolicyGroupSecurityFieldRow(sheet, box, box2SecurityGroup) ); }); } @@ -290,14 +295,19 @@ private void buildBoxPolicyMetadataSecurityFieldRow(Sheet sheet, CrisLayoutBox b createCell(row, 1, box.getShortname()); createCell(row, 2, metadataField.toString('.')); createCell(row, 3, ""); + createCell(row, 4, ""); } - private void buildBoxPolicyGroupSecurityFieldRow(Sheet sheet, CrisLayoutBox box, Group group) { + private void buildBoxPolicyGroupSecurityFieldRow(Sheet sheet, CrisLayoutBox box, + CrisLayoutBox2SecurityGroup box2SecurityGroup) { + + CrisLayoutBox alternativeBox = box2SecurityGroup.getAlternativeBox(); Row row = sheet.createRow(sheet.getLastRowNum() + 1); createCell(row, 0, box.getCell().getRow().getTab().getEntity().getLabel()); createCell(row, 1, box.getShortname()); createCell(row, 2, ""); - createCell(row, 3, group.getName()); + createCell(row, 3, box2SecurityGroup.getGroup().getName()); + createCell(row, 4, alternativeBox == null ? 
"" : alternativeBox.getShortname()); } private String convertToString(boolean value) { diff --git a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java index 9c368c2785b2..a4f5fec248ef 100644 --- a/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/script/service/impl/CrisLayoutToolParserImpl.java @@ -7,6 +7,7 @@ */ package org.dspace.layout.script.service.impl; +import static org.dspace.layout.script.service.CrisLayoutToolValidator.ALTERNATIVE_TO_COLUMN; import static org.dspace.layout.script.service.CrisLayoutToolValidator.BITSTREAM_TYPE; import static org.dspace.layout.script.service.CrisLayoutToolValidator.BOX2METADATA_SHEET; import static org.dspace.layout.script.service.CrisLayoutToolValidator.BOX2METRICS_SHEET; @@ -53,6 +54,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -76,6 +78,7 @@ import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.CrisLayoutBoxTypes; import org.dspace.layout.CrisLayoutCell; import org.dspace.layout.CrisLayoutField; @@ -84,6 +87,7 @@ import org.dspace.layout.CrisLayoutMetric2Box; import org.dspace.layout.CrisLayoutRow; import org.dspace.layout.CrisLayoutTab; +import org.dspace.layout.CrisLayoutTab2SecurityGroup; import org.dspace.layout.CrisMetadataGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.layout.script.service.CrisLayoutToolParser; @@ -110,9 +114,21 @@ public class CrisLayoutToolParserImpl implements CrisLayoutToolParser { @Override public List parse(Context context, Workbook workbook) { Sheet tabSheet = getSheetByName(workbook, TAB_SHEET); - return WorkbookUtils.getNotEmptyRowsSkippingHeader(tabSheet).stream() - .map(row -> buildTab(context, row)) - .collect(Collectors.toList()); + List tabs = + WorkbookUtils.getNotEmptyRowsSkippingHeader(tabSheet).stream() + .map(row -> buildTab(context, row)) + .collect(Collectors.toList()); + + tabs.forEach(tab -> { + tab.setTab2SecurityGroups(buildTab2SecurityGroups(context, + workbook, TAB_POLICY_SHEET, tab.getEntity().getLabel(), tab.getShortName(), tab, tabs)); + + tab.getBoxes().forEach(box -> + box.setBox2SecurityGroups(buildBox2SecurityGroups(context, + workbook, BOX_POLICY_SHEET, box.getEntitytype().getLabel(), box.getShortname(), box, tabs))); + }); + + return tabs; } private CrisLayoutTab buildTab(Context context, Row tabRow) { @@ -136,8 +152,6 @@ private CrisLayoutTab buildTab(Context context, Row tabRow) { buildTabRows(context, workbook, entityType, name).forEach(tab::addRow); tab.setMetadataSecurityFields(buildMetadataSecurityField(context, workbook, TAB_POLICY_SHEET, entityType, name)); - tab.setGroupSecurityFields(buildGroupSecurityField(context, workbook, - TAB_POLICY_SHEET, entityType, name)); return tab; } @@ -218,8 +232,6 @@ private CrisLayoutBox buildBox(Context context, Sheet boxSheet, String entityTyp box.setStyle(getCellValue(boxRow, STYLE_COLUMN)); box.setMetadataSecurityFields(buildMetadataSecurityField(context, workbook, BOX_POLICY_SHEET, entityType, boxName)); - box.setGroupSecurityFields(buildGroupSecurityField(context, workbook, - BOX_POLICY_SHEET, entityType, 
boxName)); if (boxType.equals(CrisLayoutBoxTypes.METADATA.name())) { buildCrisLayoutFields(context, workbook, entityType, boxName).forEach(box::addLayoutField); @@ -376,6 +388,107 @@ private Set buildGroupSecurityField(Context context, Workbook workbook, .collect(Collectors.toSet()); } + private Set buildBox2SecurityGroups(Context context, Workbook workbook, + String sheetName, String entity, String name, + CrisLayoutBox crisLayoutBox, + List tabs) { + Sheet sheet = getSheetByName(workbook, sheetName); + Set box2SecurityGroups = new HashSet<>(); + + getRowsByEntityAndColumnValue(sheet, entity, SHORTNAME_COLUMN, name) + .forEach(row -> { + String groupName = getCellValue(row, GROUP_COLUMN); + String alternativeBox = getCellValue(row, ALTERNATIVE_TO_COLUMN); + + if (StringUtils.isNotBlank(groupName)) { + Group group = getGroupField(context, groupName); + if (group != null) { + box2SecurityGroups.add( + buildBox2SecurityGroup(group, crisLayoutBox, entity, alternativeBox, tabs) + ); + } + } + }); + + return box2SecurityGroups; + } + + private CrisLayoutBox2SecurityGroup buildBox2SecurityGroup(Group group, CrisLayoutBox box, + String entity, + String alternativeBox, List tabs) { + + CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId box2SecurityGroupId = + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box, group); + + return new CrisLayoutBox2SecurityGroup(box2SecurityGroupId, box, group, + findAlternativeBox(alternativeBox, entity, tabs)); + } + + private CrisLayoutBox findAlternativeBox(String alternativeBox, String entityType, List tabs) { + + if (alternativeBox == null) { + return null; + } + + return tabs.stream() + .flatMap(tab -> tab.getBoxes().stream()) + .filter(crisLayoutBox -> crisLayoutBox.getShortname().equals(alternativeBox) && + crisLayoutBox.getEntitytype().getLabel().equals(entityType)) + .findFirst() + .orElseThrow(() -> new RuntimeException("Alternative box not found for shortname: " + + alternativeBox + ", entityType: " + entityType)); + } + + private Set buildTab2SecurityGroups(Context context, Workbook workbook, + String sheetName, String entity, String name, + CrisLayoutTab crisLayoutTab, + List tabs) { + Sheet sheet = getSheetByName(workbook, sheetName); + Set tab2SecurityGroups = new HashSet<>(); + + getRowsByEntityAndColumnValue(sheet, entity, SHORTNAME_COLUMN, name) + .forEach(row -> { + String groupName = getCellValue(row, GROUP_COLUMN); + String alternativeTab = getCellValue(row, ALTERNATIVE_TO_COLUMN); + + if (StringUtils.isNotBlank(groupName)) { + Group group = getGroupField(context, groupName); + if (group != null) { + tab2SecurityGroups.add( + buildTab2SecurityGroup(group, crisLayoutTab, entity, alternativeTab, tabs) + ); + } + } + }); + + return tab2SecurityGroups; + } + + private CrisLayoutTab2SecurityGroup buildTab2SecurityGroup(Group group, CrisLayoutTab tab, + String entity, + String alternativeTab, List tabs) { + + CrisLayoutTab2SecurityGroup.CrisLayoutTab2SecurityGroupId tab2SecurityGroupId = + new CrisLayoutTab2SecurityGroup.CrisLayoutTab2SecurityGroupId(tab, group); + + return new CrisLayoutTab2SecurityGroup(tab2SecurityGroupId, tab, group, + findAlternativeTab(alternativeTab, entity, tabs)); + } + + private CrisLayoutTab findAlternativeTab(String alternativeTab, String entityType, List tabs) { + + if (alternativeTab == null) { + return null; + } + + return tabs.stream() + .filter(crisLayoutTab -> crisLayoutTab.getShortName().equals(alternativeTab) && + crisLayoutTab.getEntity().getLabel().equals(entityType)) + .findFirst() + 
.orElseThrow(() -> new RuntimeException("Alternative tab not found for shortname: " + + alternativeTab + ", entityType: " + entityType)); + } + private Stream getRowsByEntityAndColumnValue(Sheet sheet, String entity, String columnName, String value) { return WorkbookUtils.getNotEmptyRowsSkippingHeader(sheet).stream() .filter(row -> value.equals(getCellValue(row, columnName))) diff --git a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabAccessService.java b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabAccessService.java index 12d7d08084e9..2679d34865df 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabAccessService.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabAccessService.java @@ -32,6 +32,5 @@ public interface CrisLayoutTabAccessService { * @return true if access has to be granded, false otherwise * @throws SQLException in case of error during database access */ - boolean hasAccess(Context context, EPerson user, CrisLayoutTab tab, Item item) - throws SQLException; + boolean hasAccess(Context context, EPerson user, CrisLayoutTab tab, Item item); } diff --git a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java index 7224b8f26302..919dc7eb4310 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/CrisLayoutTabService.java @@ -152,4 +152,6 @@ public List getMetadataField(Context context, Integer tabId, Inte * @throws SQLException An exception that provides information on a database errors. */ public List findByItem(Context context, String itemUuid) throws SQLException; + + public boolean hasAccess(Context context, CrisLayoutTab tab, Item item); } diff --git a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabAccessServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabAccessServiceImpl.java index 71e20fd883fa..331c5df679be 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabAccessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabAccessServiceImpl.java @@ -31,12 +31,14 @@ public CrisLayoutTabAccessServiceImpl(LayoutSecurityService layoutSecurityServic } @Override - public boolean hasAccess(Context context, EPerson user, CrisLayoutTab tab, Item item) throws SQLException { - return layoutSecurityService.hasAccess(LayoutSecurity.valueOf(tab.getSecurity()), - context, - user, - tab.getMetadataSecurityFields(), - tab.getGroupSecurityFields(), - item); + public boolean hasAccess(Context context, EPerson user, CrisLayoutTab tab, Item item) { + try { + return layoutSecurityService.hasAccess( + LayoutSecurity.valueOf(tab.getSecurity()), context, user, tab.getMetadataSecurityFields(), + tab.getGroupSecurityFields(), item + ); + } catch (SQLException e) { + throw new RuntimeException(e); + } } } diff --git a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java index 2ce7ee7ac4f4..980305e67d6e 100644 --- a/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/layout/service/impl/CrisLayoutTabServiceImpl.java @@ -29,6 +29,7 @@ import org.dspace.core.Context; import org.dspace.layout.CrisLayoutTab; import 
org.dspace.layout.dao.CrisLayoutTabDAO; +import org.dspace.layout.service.CrisLayoutTabAccessService; import org.dspace.layout.service.CrisLayoutTabService; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; @@ -54,6 +55,9 @@ public class CrisLayoutTabServiceImpl implements CrisLayoutTabService { private SubmissionConfigReader submissionConfigReader; + @Autowired + CrisLayoutTabAccessService crisLayoutTabAccessService; + @PostConstruct private void setup() throws SubmissionConfigReaderException { submissionConfigReader = new SubmissionConfigReader(); @@ -217,6 +221,11 @@ public List findByItem(Context context, String itemUuid) throws S return layoutTabs; } + @Override + public boolean hasAccess(Context context, CrisLayoutTab tab, Item item) { + return crisLayoutTabAccessService.hasAccess(context, context.getCurrentUser(), tab, item); + } + private String getSubmissionDefinitionName(Item item) { if (submissionConfigReader == null || item.getOwningCollection() == null) { return ""; diff --git a/dspace-api/src/main/resources/org/dspace/layout/script/service/impl/cris-layout-configuration-template.xls b/dspace-api/src/main/resources/org/dspace/layout/script/service/impl/cris-layout-configuration-template.xls index a921219abfad173dde9253ce4368468386eda6c7..2eddc88c677749d144d96421e076cd3a8938de3d 100644 GIT binary patch delta 1476 zcmb7@OKcNY6o${eH>9Br91=(!Gc`@36xd{#5v=U$55$N6OF9z zzyEp8+`_Z2!n3aB1K$2y9>6~d^4Y`lph1_P&`sEXc=q%>c;HGy#bIFxwupka>D;Lp ziGDctMLzp^HlO|S#O%qr+&ly!)j=m7Kzl!)o|ry@asbTDB-c~ox!IZNBVUvFDqa!M z>xjdNig+?VJ(Hu{^Uwf?k|Zh=J~f>hY&lNiH+Q%tVmOJY>98jHe9gSI#~Am`F!u}o z89H_uEB+xCz|nT^pB(w*!0yS+rz2C76UJZK_l)f_%7I1}8whD=1vm>{;P}6~!C?s( z^qcW}V>2rbeJ2a?F#Ht@ikG3N)b7?rG!Y531oUdO3^K%x5k19YH{VNA2slYy;DYl; zu_?>hCGomD8e4NaIjG{}8YXi|q+5~)2g8m_1r=5`C#47Xey^s47iUS;ocq8Bk89YE zZ?I(i)N#bNCnKd`b9I<{d}n%tQ$iI7^$o`Dc!%9JVm`P#mTauLm8LsgfpRCknSdL0oI6_&E_1{=PQPYb*j7p+T$A zd(Yszm5`a$w;fL5(B@P=QW)sEf@IjTx*X|R=CGiZ}7Tdy5dXB{T1M6LGO>@n~7@S;(w&d?Yio z?iybTnl)F0VxLTHGO?u}|Gk|dtDS|#vW!oixmb3OFR^9w{;2ZHe*`Y9WE6br&6SM$ wN9T<>^+QQb!^b!zoSM*!`*LcV!k_OSzR-J@iI^*77INxo#>(d3dDShw1(10Fod5s; delta 1377 zcmZ`&O=weD6h8O9{J#9WK9d(?ZJTClJ8h#)nrf}>ECdT;78arCPKi2>g~k>etur94 zKegyW(JN#j8M>;on1y%8n8PFgeCg>%YJver11_JO~T}5HRD8$5caP8EQB#~h# z{9Mc*&KL8)93ML|J~0CkXx>1P0H8dJ(c>eBkk(pBoU&E5UQH>1YGz5X;e{r3I6&%Mi`C(0*l@9#JCrgR4yFpxuo@9pb z7zw4#Yluv$QE$+Vs)ZZe&?)yC-8}hpmD5?>adv?V&dWd6<~h44!l^{^h1c<*vL9es zs8xj5Cq*ICEcSIxu9$C3Os2=#qWCA>C|D%^-Mkx_m3b%G1=*0@!KRw9&@wg#z;F=~ zRm5REG~0;b(*RmtQZh^VLltrS0$OZDt7YImSCn`!M6}wJNB1Z-#yz-CRGxS@meydSt#VtXH`&Nz#pv5uh9MO^7}Owb z2fbD>Vi%g=Kboh`(eS%~r6>&W{$XD4Xp*9SFmwYSBb+UdKz+PiOs_cqIW zc43L*QOt!US8iUvTeHt(_phn{?v`9z)nw_?65lxcbaPuc#8k|s@eP`s*m*vkdnMk^#=JbSm Nim@K^|Dx(uz5#Vw{(}Gj diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql new file mode 100644 index 000000000000..6ae50fb29bf6 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + 
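This migration, together with its box-level and Oracle/Postgres counterparts, adds the alternative_tab_id / alternative_box_id columns that back the DSC-1377 fallback: when layout security hides a tab or box from the current user, the matching security-group row may point at an alternative one to render instead. A minimal sketch of that resolution rule, written against the entities and the CrisLayoutTabService.hasAccess(...) method added in this change set (the resolver class itself is illustrative, not project API):

import java.util.Objects;
import java.util.Optional;

import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.layout.CrisLayoutTab;
import org.dspace.layout.CrisLayoutTab2SecurityGroup;
import org.dspace.layout.service.CrisLayoutTabService;

/**
 * Illustrative sketch only: picks the tab to render for an item, falling back to the
 * alternative tab configured on one of the tab's security-group rows (alternative_tab_id)
 * when the current user does not pass the tab's security check.
 */
public class AlternativeTabResolverSketch {

    private final CrisLayoutTabService tabService;

    public AlternativeTabResolverSketch(CrisLayoutTabService tabService) {
        this.tabService = tabService;
    }

    public Optional<CrisLayoutTab> resolve(Context context, Item item, CrisLayoutTab tab) {
        if (tabService.hasAccess(context, tab, item)) {
            return Optional.of(tab);
        }
        // No access: return the first non-null alternative tab, if one is configured.
        return tab.getTab2SecurityGroups().stream()
                  .map(CrisLayoutTab2SecurityGroup::getAlternativeTab)
                  .filter(Objects::nonNull)
                  .findFirst();
    }
}

The CrisLayoutTabConverter touched later in this patch applies the same rule when converting tabs, and for boxes additionally requires the alternative box to have content before rendering it.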
+----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_tab2securitygroup ADD alternative_tab_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_tab2securitygroup ADD COLUMN alternative_tab_id INTEGER; +ALTER TABLE cris_layout_tab2securitygroup ADD CONSTRAINT cris_layout_tab2securitygroup_tab_id2 FOREIGN KEY (alternative_tab_id) REFERENCES cris_layout_tab (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql new file mode 100644 index 000000000000..38360bb13cd8 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_box2securitygroup ADD alternative_box_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_box2securitygroup ADD COLUMN alternative_box_id INTEGER; +ALTER TABLE cris_layout_box2securitygroup ADD CONSTRAINT cris_layout_box2securitygroup_box_id2 FOREIGN KEY (alternative_box_id) REFERENCES cris_layout_box (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql new file mode 100644 index 000000000000..6ae50fb29bf6 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_tab2securitygroup ADD alternative_tab_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_tab2securitygroup ADD COLUMN alternative_tab_id INTEGER; +ALTER TABLE cris_layout_tab2securitygroup ADD CONSTRAINT cris_layout_tab2securitygroup_tab_id2 FOREIGN KEY (alternative_tab_id) REFERENCES cris_layout_tab (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql new file mode 100644 index 000000000000..38360bb13cd8 --- /dev/null +++ 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_box2securitygroup ADD alternative_box_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_box2securitygroup ADD COLUMN alternative_box_id INTEGER; +ALTER TABLE cris_layout_box2securitygroup ADD CONSTRAINT cris_layout_box2securitygroup_box_id2 FOREIGN KEY (alternative_box_id) REFERENCES cris_layout_box (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql new file mode 100644 index 000000000000..6ae50fb29bf6 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.12__add_alternative_tab_id_to_cris_layout_tab2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_tab2securitygroup ADD alternative_tab_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_tab2securitygroup ADD COLUMN alternative_tab_id INTEGER; +ALTER TABLE cris_layout_tab2securitygroup ADD CONSTRAINT cris_layout_tab2securitygroup_tab_id2 FOREIGN KEY (alternative_tab_id) REFERENCES cris_layout_tab (id) ON DELETE SET NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql new file mode 100644 index 000000000000..38360bb13cd8 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.12.13__add_alternative_box_id_to_cris_layout_box2securitygroup.sql @@ -0,0 +1,14 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Alter TABLE cris_layout_box2securitygroup ADD alternative_box_id +----------------------------------------------------------------------------------- + +ALTER TABLE cris_layout_box2securitygroup ADD COLUMN alternative_box_id INTEGER; +ALTER TABLE cris_layout_box2securitygroup ADD CONSTRAINT cris_layout_box2securitygroup_box_id2 FOREIGN KEY (alternative_box_id) REFERENCES cris_layout_box (id) ON DELETE SET NULL; \ No 
newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutBoxBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutBoxBuilder.java index cf80c8778dce..59784b192ed4 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutBoxBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutBoxBuilder.java @@ -19,7 +19,9 @@ import org.dspace.content.MetadataField; import org.dspace.core.Context; import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.Group; import org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.CrisLayoutBoxTypes; import org.dspace.layout.CrisLayoutField; import org.dspace.layout.LayoutSecurity; @@ -163,6 +165,17 @@ public CrisLayoutBoxBuilder addMetadataSecurityField(MetadataField field) { return this; } + public CrisLayoutBoxBuilder addBox2SecurityGroups(Group group, CrisLayoutBox alternativeBox) throws SQLException { + if (this.box.getBox2SecurityGroups() == null) { + this.box.setBox2SecurityGroups(new HashSet<>()); + } + this.box.getBox2SecurityGroups().add( + new CrisLayoutBox2SecurityGroup(new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box, group), + box, group, alternativeBox) + ); + return this; + } + public CrisLayoutBoxBuilder withContainer(boolean container) { this.box.setContainer(container); return this; diff --git a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java index 312d04bb4ada..4736324f4d35 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CrisLayoutTabBuilder.java @@ -20,10 +20,12 @@ import org.dspace.content.MetadataField; import org.dspace.core.Context; import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.Group; import org.dspace.layout.CrisLayoutBox; import org.dspace.layout.CrisLayoutCell; import org.dspace.layout.CrisLayoutRow; import org.dspace.layout.CrisLayoutTab; +import org.dspace.layout.CrisLayoutTab2SecurityGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.layout.service.CrisLayoutTabService; @@ -207,4 +209,15 @@ public CrisLayoutTabBuilder addMetadatasecurity(MetadataField metadataField) { this.tab.getMetadataSecurityFields().add(metadataField); return this; } + + public CrisLayoutTabBuilder addTab2SecurityGroups(Group group, CrisLayoutTab alternativeTab) { + if (this.tab.getTab2SecurityGroups() == null) { + this.tab.setTab2SecurityGroups(new HashSet<>()); + } + this.tab.getTab2SecurityGroups().add( + new CrisLayoutTab2SecurityGroup(new CrisLayoutTab2SecurityGroup.CrisLayoutTab2SecurityGroupId(tab, group), + tab, group, alternativeTab) + ); + return this; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java index d15a04cf0108..2c94bd8a8f3f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CrisLayoutTabConverter.java @@ -9,6 +9,7 @@ import java.sql.SQLException; import java.util.List; +import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -27,11 +28,14 @@ import org.dspace.core.Context; import org.dspace.core.exception.SQLRuntimeException; import 
org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.CrisLayoutCell; import org.dspace.layout.CrisLayoutRow; import org.dspace.layout.CrisLayoutTab; +import org.dspace.layout.CrisLayoutTab2SecurityGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.layout.service.CrisLayoutBoxService; +import org.dspace.layout.service.CrisLayoutTabService; import org.dspace.services.RequestService; import org.dspace.util.UUIDUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -61,18 +65,47 @@ public class CrisLayoutTabConverter implements DSpaceConverter convertTab(tab, projection)) + .orElseGet(CrisLayoutTabRest::new); + } + + private boolean hasAccess(Item item, CrisLayoutTab tab) { + Context context = ContextUtil.obtainCurrentRequestContext(); + return crisLayoutTabService.hasAccess(context, tab, item); + } + + private CrisLayoutTab findAlternativeTab(CrisLayoutTab tab) { + return tab.getTab2SecurityGroups() + .stream() + .map(CrisLayoutTab2SecurityGroup::getAlternativeTab) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); + } + + private CrisLayoutTabRest convertTab(CrisLayoutTab tab, Projection projection) { CrisLayoutTabRest rest = new CrisLayoutTabRest(); - rest.setId(model.getID()); - rest.setEntityType(model.getEntity().getLabel()); - rest.setCustomFilter(model.getCustomFilter()); - rest.setShortname(model.getShortName()); - rest.setHeader(model.getHeader()); - rest.setPriority(model.getPriority()); - rest.setSecurity(model.getSecurity()); - rest.setRows(convertRows(getScopeItem(), model.getRows(), projection)); - rest.setLeading(model.isLeading()); + rest.setId(tab.getID()); + rest.setEntityType(tab.getEntity().getLabel()); + rest.setCustomFilter(tab.getCustomFilter()); + rest.setShortname(tab.getShortName()); + rest.setHeader(tab.getHeader()); + rest.setPriority(tab.getPriority()); + rest.setSecurity(tab.getSecurity()); + rest.setRows(convertRows(getScopeItem(), tab.getRows(), projection)); + rest.setLeading(tab.isLeading()); return rest; } @@ -124,15 +157,43 @@ private CrisLayoutCellRest convertCell(Item item, CrisLayoutCell cell, Projectio private List convertBoxes(Item item, List boxes, Projection projection) { return boxes.stream() - .filter(box -> item == null || hasAccess(item, box)) - .map(box -> boxConverter.convert(box, projection)) - .collect(Collectors.toList()); + .map(box -> getCrisLayoutBox(item, box)) + .filter(Objects::nonNull) + .map(box -> boxConverter.convert(box, projection)) + .collect(Collectors.toList()); + } + + private CrisLayoutBox getCrisLayoutBox(Item item, CrisLayoutBox box) { + + if (item == null) { + return box; + } + + return Optional.of(box) + .filter(b -> hasAccess(item, b) && hasContent(item, b)) + .orElseGet(() -> + Optional.ofNullable(findAlternativeBox(box)) + .filter(altBox -> hasContent(item, altBox)) + .orElse(null)); } private boolean hasAccess(Item item, CrisLayoutBox box) { Context context = ContextUtil.obtainCurrentRequestContext(); - return crisLayoutBoxService.hasContent(context, box, item) - && crisLayoutBoxService.hasAccess(context, box, item); + return crisLayoutBoxService.hasAccess(context, box, item); + } + + private boolean hasContent(Item item, CrisLayoutBox box) { + Context context = ContextUtil.obtainCurrentRequestContext(); + return crisLayoutBoxService.hasContent(context, box, item); + } + + private CrisLayoutBox findAlternativeBox(CrisLayoutBox box) { + return box.getBox2SecurityGroups() + .stream() + 
.map(CrisLayoutBox2SecurityGroup::getAlternativeBox) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); } private CrisLayoutRow toRowModel(Context context, CrisLayoutRowRest rowRest) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/LayoutSecurityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/LayoutSecurityIT.java index 5d293ce2fbc3..aa5705beb861 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/LayoutSecurityIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/LayoutSecurityIT.java @@ -53,6 +53,7 @@ import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.RegistrationDataService; import org.dspace.layout.CrisLayoutBox; +import org.dspace.layout.CrisLayoutBox2SecurityGroup; import org.dspace.layout.LayoutSecurity; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.hamcrest.Matchers; @@ -491,14 +492,20 @@ public void customDataTestWithOneGroup() throws Exception { .build(); // Create Group with member userA - Set groups = new HashSet<>(); + Set box2SecurityGroups = new HashSet<>(); Group testGroup = GroupBuilder.createGroup(context) .withName("testGroup") .addMember(userA) .build(); - groups.add(testGroup); - box1.setGroupSecurityFields(groups); + new CrisLayoutBox2SecurityGroup( + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box1, testGroup), + box1, testGroup, null); + + box2SecurityGroups.add(new CrisLayoutBox2SecurityGroup( + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box1, testGroup), + box1, testGroup, null)); + box1.setBox2SecurityGroups(box2SecurityGroups); CrisLayoutFieldBuilder.createMetadataField(context, abs, 0, 0) .withLabel("LABEL ABS") @@ -577,7 +584,7 @@ public void customDataTestWithMultipleGroup() throws Exception { .build(); // Create Group with member userA - Set boxGroups = new HashSet<>(); + Set boxGroups = new HashSet<>(); Group testGroup = GroupBuilder.createGroup(context) .withName("testGroup") @@ -589,9 +596,14 @@ public void customDataTestWithMultipleGroup() throws Exception { .addMember(userB) .build(); - boxGroups.add(testGroup); - boxGroups.add(testGroup1); - box1.setGroupSecurityFields(boxGroups); + boxGroups.add(new CrisLayoutBox2SecurityGroup( + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box1, testGroup), + box1, testGroup, null)); + boxGroups.add(new CrisLayoutBox2SecurityGroup( + new CrisLayoutBox2SecurityGroup.CrisLayoutBox2SecurityGroupId(box1, testGroup1), + box1, testGroup, null)); + + box1.setBox2SecurityGroups(boxGroups); CrisLayoutFieldBuilder.createMetadataField(context, abs, 0, 0) .withLabel("LABEL ABS") diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java index d536f547d0bd..aaa3fb1c62b6 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java @@ -77,6 +77,7 @@ import org.dspace.content.service.RelationshipService; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; import org.dspace.layout.CrisLayoutBox; import org.dspace.layout.CrisLayoutBoxTypes; import org.dspace.layout.CrisLayoutCell; @@ -122,6 +123,9 @@ public class CrisLayoutTabRestRepositoryIT extends AbstractControllerIntegration 
@Autowired protected RelationshipService relationshipService; + @Autowired + protected GroupService groupService; + private final String METADATASECURITY_URL = "http://localhost:8080/api/core/metadatafield/"; /** @@ -2469,6 +2473,213 @@ public void excludeThumbnailNegativeMetadataValueMatcherTabMultiBoxConfiguration .andExpect(jsonPath("$._embedded.tabs[0].rows[1]").doesNotExist()); } + @Test + public void testFindByItemWithAlternativeTabs() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = mdss.find(context, "person"); + MetadataField firstName = mfss.findByElement(context, schema, "givenName", null); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + // Create new community + Community community = CommunityBuilder.createCommunity(context) + .withName("Test Community") + .withTitle("Title test community") + .build(); + // Create new collection + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Test Collection") + .build(); + // Create entity Type + EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + EntityType eTypePer = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + // Create new person item + Item item = ItemBuilder.createItem(context, collection) + .withPersonIdentifierFirstName("Danilo") + .withPersonIdentifierLastName("Di Nuzzo") + .withEntityType(eTypePer.getLabel()) + .build(); + + CrisLayoutBox boxOne = CrisLayoutBoxBuilder.createBuilder(context, eTypePer, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .build(); + + CrisLayoutBox boxTwo = CrisLayoutBoxBuilder.createBuilder(context, eTypePer, false, false) + .withShortname("Box shortname 2") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .build(); + + CrisLayoutFieldBuilder.createMetadataField(context, firstName, 0, 1) + .withLabel("GIVEN NAME") + .withRendering("TEXT") + .withBox(boxOne) + .build(); + + CrisLayoutFieldBuilder.createMetadataField(context, firstName, 0, 1) + .withLabel("GIVEN NAME") + .withRendering("TEXT") + .withBox(boxTwo) + .build(); + + // add boxOne to tabOne + CrisLayoutTab tabOne = + CrisLayoutTabBuilder.createTab(context, eTypePer, 0) + .withShortName("TabOne For Person - priority 0") + .withSecurity(LayoutSecurity.ADMINISTRATOR) + .withHeader("New Tab header") + .addBoxIntoNewRow(boxOne, "rowTwoStyle", "cellOfRowTwoStyle") + .build(); + + // add boxTwo to tabTwo + CrisLayoutTab tabTwo = + CrisLayoutTabBuilder.createTab(context, eTypePer, 0) + .withShortName("Tab2 For Person - priority 0") + .withSecurity(LayoutSecurity.CUSTOM_DATA) + .withHeader("New Tab2 header") + .addBoxIntoNewRow(boxTwo, "rowTwoStyle2", "cellOfRowTwoStyle2") + .addTab2SecurityGroups(adminGroup, tabOne) + .build(); + + context.restoreAuthSystemState(); + + // admin user will see two tabs + getClient(getAuthToken(admin.getEmail(), password)) + .perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(2))) + .andExpect(jsonPath("$._embedded.tabs[0].id", is(tabOne.getID()))) + .andExpect(jsonPath("$._embedded.tabs[0].shortname", is("TabOne For Person - priority 0"))) + .andExpect(jsonPath("$._embedded.tabs[0].header", is("New Tab header"))) + .andExpect(jsonPath("$._embedded.tabs[0].security", is(LayoutSecurity.ADMINISTRATOR.getValue()))) + 
.andExpect(jsonPath("$._embedded.tabs[0].rows", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[1].id", is(tabTwo.getID()))) + .andExpect(jsonPath("$._embedded.tabs[1].shortname", is("Tab2 For Person - priority 0"))) + .andExpect(jsonPath("$._embedded.tabs[1].header", is("New Tab2 header"))) + .andExpect(jsonPath("$._embedded.tabs[1].security", is(LayoutSecurity.CUSTOM_DATA.getValue()))) + .andExpect(jsonPath("$._embedded.tabs[1].rows", hasSize(1))); + + // anonymous user will see only alternative tab is tabOne + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(1))) + .andExpect(jsonPath("$._embedded.tabs[0].id", is(tabOne.getID()))) + .andExpect(jsonPath("$._embedded.tabs[0].shortname", is("TabOne For Person - priority 0"))) + .andExpect(jsonPath("$._embedded.tabs[0].header", is("New Tab header"))) + .andExpect(jsonPath("$._embedded.tabs[0].security", is(LayoutSecurity.ADMINISTRATOR.getValue()))) + .andExpect(jsonPath("$._embedded.tabs[0].rows", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].style", is("rowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].style", is("cellOfRowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(boxOne)))); + } + + @Test + public void testFindByItemWithAlternativeBoxes() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = mdss.find(context, "person"); + MetadataField firstName = mfss.findByElement(context, schema, "givenName", null); + Group adminGroup = groupService.findByName(context, Group.ADMIN); + // Create new community + Community community = CommunityBuilder.createCommunity(context) + .withName("Test Community") + .withTitle("Title test community") + .build(); + // Create new collection + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Test Collection") + .build(); + // Create entity Type + EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + EntityType eTypePer = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + // Create new person item + Item item = ItemBuilder.createItem(context, collection) + .withPersonIdentifierFirstName("Danilo") + .withPersonIdentifierLastName("Di Nuzzo") + .withEntityType(eTypePer.getLabel()) + .build(); + + CrisLayoutBox boxOne = CrisLayoutBoxBuilder.createBuilder(context, eTypePer, false, false) + .withShortname("Box shortname 1") + .withSecurity(LayoutSecurity.PUBLIC) + .withContainer(false) + .build(); + + // add boxOne as alternative to boxTwo + CrisLayoutBox boxTwo = CrisLayoutBoxBuilder.createBuilder(context, eTypePer, false, false) + .withShortname("Box shortname 2") + .withSecurity(LayoutSecurity.CUSTOM_DATA) + .withContainer(false) + .addBox2SecurityGroups(adminGroup, boxOne) + .build(); + + CrisLayoutFieldBuilder.createMetadataField(context, firstName, 0, 1) + .withLabel("GIVEN NAME") + .withRendering("TEXT") + .withBox(boxOne) + .build(); + + CrisLayoutFieldBuilder.createMetadataField(context, firstName, 0, 1) + .withLabel("GIVEN NAME") + .withRendering("TEXT") + .withBox(boxTwo) + .build(); + + // add boxTwo to tab + CrisLayoutTab tab = CrisLayoutTabBuilder.createTab(context, eTypePer, 0) + .withShortName("TabOne For Person - priority 0") + 
.withSecurity(LayoutSecurity.PUBLIC) + .withHeader("New Tab header") + .withLeading(true) + .addBoxIntoNewRow(boxTwo, "rowTwoStyle", "cellOfRowTwoStyle") + .build(); + + context.restoreAuthSystemState(); + + // admin user will see boxTwo + getClient(getAuthToken(admin.getEmail(), password)) + .perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.tabs[0].id", is(tab.getID()))) + .andExpect(jsonPath("$._embedded.tabs[0].shortname", is("TabOne For Person - priority 0"))) + .andExpect(jsonPath("$._embedded.tabs[0].header", is("New Tab header"))) + .andExpect(jsonPath("$._embedded.tabs[0].leading", is(true))) + .andExpect(jsonPath("$._embedded.tabs[0].security", is(LayoutSecurity.PUBLIC.getValue()))) + .andExpect(jsonPath("$._embedded.tabs[0].rows", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].style", is("rowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].style", is("cellOfRowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(boxTwo)))); + + // anonymous user will see boxOne + getClient().perform(get("/api/layout/tabs/search/findByItem") + .param("uuid", item.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.tabs[0].id", is(tab.getID()))) + .andExpect(jsonPath("$._embedded.tabs[0].shortname", is("TabOne For Person - priority 0"))) + .andExpect(jsonPath("$._embedded.tabs[0].header", is("New Tab header"))) + .andExpect(jsonPath("$._embedded.tabs[0].leading", is(true))) + .andExpect(jsonPath("$._embedded.tabs[0].security", is(LayoutSecurity.PUBLIC.getValue()))) + .andExpect(jsonPath("$._embedded.tabs[0].rows", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].style", is("rowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells", hasSize(1))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].style", is("cellOfRowTwoStyle"))) + .andExpect(jsonPath("$._embedded.tabs[0].rows[0].cells[0].boxes", contains(matchBox(boxOne)))); + } + private CrisLayoutTabRest parseJson(String name) throws Exception { return new ObjectMapper().readValue(getFileInputStream(name), CrisLayoutTabRest.class); } diff --git a/dspace/config/hibernate.cfg.xml b/dspace/config/hibernate.cfg.xml index 8597bedbc34a..9a3cdd967f0f 100644 --- a/dspace/config/hibernate.cfg.xml +++ b/dspace/config/hibernate.cfg.xml @@ -109,6 +109,8 @@ + + diff --git a/dspace/etc/conftool/cris-layout-configuration.xls b/dspace/etc/conftool/cris-layout-configuration.xls index 6a7d9daf8e8083166a0cd7ac64511b5f4c3be1a6..d9c9ab9f0090d4baf78b6269a88f18a9ddc03cc4 100644 GIT binary patch delta 1685 zcma)5ZD?C%6h7zX`>Qv%OJbHb4Y6z`tFukA&aUh1mzf(I&bcZSic1$Q6kDe*g$;ANiM%Xdxg`ECHkF#)S)a$5MdoI&)~W1oV0QBt%HLD zsUw{F{MMpzI6W|UXfc}`9!zngmFw6)5N&=l3vSp{iK>mFHJcX}UgS0xEGf~50GyHw zt|7GS_SIPdcFIj+n@6eu=-Azzc;2@r|*7h zON);kT~~BDMv`>jX;a^|meLvs1oY`U8dW&Rn4fYvhTU z(?YOI^X7Fu3D0SWUZf+D`?#0<&fdKr`bx)`I*#OQf$ue6j8+ilX@-+BE% z=Xsu{mQAT;)8vK`QQ0)E7#@Ew2>j?E+1--{Crs`UXc8FhIe8`xR_N)-N2utA`sW1N zfH69BBH7*7o$OA77j93HZ1?#FuLw71<=w9g^qx+G3@+MlcMtY`Eu0qNJauw#p!Zeb zYW^l~98L}lp3YZu!D|b`Xcjt-4@6=wr@;lE%c$BoTeErK+!MlPhyRsmj!LLdrtL#$ zYWLQf0gfmw&Q7v&?{Dqn=Bt)Xp+Tma!24mPW4BHc$5(ID=oE771Fls&?v`V3%zdgYI3 zn-h&nPTq_MBXhp$9#TC}Z(nKl&&Yx!)RA%Oiv#oS9t5YWg*Ig#N8f 
zmeh76DsYr`CxW3xQ$dkf)MgQj>f`icqT$JX-a=Fvl);R%pux(Uy;`Kn#^c&sp zpKX+Xuotw8wTvrkvzHC_{ujiv(fI&xT|YI?H64ggc; zJcHy=^-Aiup1;kKEB~ARo)1#yS_gf9tr{O9{dce1I1|J{=9{Raxm-W*S@XS0!WkNy zud3bIS~LaT!|kolinjF8!h`rQU0?a^>o9ztWbwjX0<5Sy7~_16rJzLp8)T;uottnCS{&K&e>U3{v7R zIV0<_Z$d%|8)?^XE!hityy%sGbQMk%a*<5-lpl8#KD4dy;Vc(kZ!iC>=%R_-pN~`h zN+A36QG8o+mMF_a16ZG}Ifn1Ngdb<8uHt2>rX*xrkH?sdFig1S$?Td%Lw4#*JY1Ps zUB}^1Hn3G^i?%`+FEe%H#6e%?&`k`Qo21ypt)WGvklvk5-NZA9$Fg^Bp-J~24H9cs From 0e16cb5a3b07109c7d507ab63d76aa243759f92a Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Mon, 5 Feb 2024 17:33:31 +0100 Subject: [PATCH 671/686] [DSC-1526] Add test to check the bug --- .../org/dspace/app/rest/CorrectionStepIT.java | 98 ++++++++++++++++++- 1 file changed, 97 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CorrectionStepIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CorrectionStepIT.java index 34665592823e..685036833f06 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CorrectionStepIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CorrectionStepIT.java @@ -126,7 +126,8 @@ public void setup() throws Exception { .withEntityType("Publication") .withWorkflowGroup("editor", admin) .withSubmitterGroup(eperson) - .withSubmissionDefinition("traditional-with-correction") + .withSubmissionDefinition("traditional") + .withCorrectionSubmissionDefinition("traditional-with-correction") .build(); date = "2020-02-20"; @@ -274,6 +275,101 @@ public void checkCorrection() throws Exception { } + @Test + public void checkCorrectionWithDuplicates() throws Exception { + + String tokenSubmitter = getAuthToken(eperson.getEmail(), password); + + //create a correction item + getClient(tokenSubmitter).perform(post("/api/submission/workspaceitems") + .param("owningCollection", collection.getID().toString()) + .param("relationship", "isCorrectionOfItem") + .param("item", itemToBeCorrected.getID().toString()) + .contentType(org.springframework.http.MediaType.APPLICATION_JSON)) + .andExpect(status().isCreated()) + .andDo(result -> workspaceItemIdRef.set(read(result.getResponse().getContentAsString(), "$.id"))); + + List relationshipList = relationshipService.findByItem(context, itemToBeCorrected); + assert relationshipList.size() > 0; + Item correctedItem = relationshipList.get(0).getLeftItem(); + WorkspaceItem newWorkspaceItem = workspaceItemService.findByItem(context,correctedItem); + + //make a change on the title + Map value = new HashMap(); + final String newDate = "2020-02-21"; + value.put("value", newDate); + List operations = new ArrayList(); + operations.add(new ReplaceOperation("/sections/traditionalpageone/dc.date.issued/0", value)); + String patchBody = getPatchContent(operations); + getClient(tokenSubmitter).perform(patch("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .content(patchBody) + .contentType("application/json-patch+json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()); + + final String newTitle = "New Title"; + value.put("value", newTitle); + operations = new ArrayList(); + operations.add(new ReplaceOperation("/sections/traditionalpageone/dc.title/0", value)); + patchBody = getPatchContent(operations); + getClient(tokenSubmitter).perform(patch("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .content(patchBody) + .contentType("application/json-patch+json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()); + + //remove subject 
+ operations = new ArrayList(); + operations.add(new RemoveOperation("/sections/traditionalpagetwo/dc.subject/0")); + patchBody = getPatchContent(operations); + getClient(tokenSubmitter).perform(patch("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .content(patchBody) + .contentType("application/json-patch+json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()); + + //add an asbtract description + Map addValue = new HashMap(); + final String newDescription = "New Description"; + addValue.put("value", newDescription); + operations = new ArrayList(); + operations.add(new AddOperation("/sections/traditionalpagetwo/dc.description.abstract", List.of(addValue))); + patchBody = getPatchContent(operations); + getClient(tokenSubmitter).perform(patch("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .content(patchBody) + .contentType("application/json-patch+json")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()); + + getClient(tokenSubmitter).perform(get("/api/submission/workspaceitems/" + newWorkspaceItem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sections.correction.metadata").doesNotExist()); + + AtomicReference workflowItemIdRef = new AtomicReference(); + + getClient(tokenSubmitter).perform(post("/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + newWorkspaceItem.getID()) + .contentType(textUriContentType)) + .andExpect(status().isCreated()) + .andDo(result -> workflowItemIdRef.set(read(result.getResponse().getContentAsString(), "$.id"))); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + //check if the correction is present + final String extraEntry = "ExtraEntry"; + getClient(tokenAdmin).perform(get("/api/workflow/workflowitems/" + workflowItemIdRef.get())) + //The status has to be 200 OK + .andExpect(status().isOk()) + //The array of browse index should have a size equals to 4 + .andExpect(jsonPath("$.sections.correction.metadata", hasSize(equalTo(4)))) + .andExpect(jsonPath("$.sections.correction.empty", is(false))) + .andExpect(jsonPath("$.sections.correction.metadata",hasItem(matchMetadataCorrection(newTitle)))) + .andExpect(jsonPath("$.sections.correction.metadata",hasItem(matchMetadataCorrection(newDate)))) + .andExpect(jsonPath("$.sections.correction.metadata",hasItem(matchMetadataCorrection(newDescription)))) + .andExpect(jsonPath("$.sections.correction.metadata",hasItem(matchMetadataCorrection(extraEntry)))); + + } + @Test public void checkEmptyCorrection() throws Exception { String tokenSubmitter = getAuthToken(eperson.getEmail(), password); From 2740523fe89176d2819fd9606fa812fedc630de8 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Mon, 5 Feb 2024 18:47:19 +0100 Subject: [PATCH 672/686] Remove duplicate field introduced by automatic git-merge --- dspace/config/submission-forms.xml | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index e2c51d43b55d..45137c1ab15d 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -47,17 +47,6 @@ - - - bitstream - hide - - dropdown - false - - - -
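
A note on the PATCH bodies used in the checkCorrectionWithDuplicates test above: getPatchContent(operations) serializes the operations into a standard JSON Patch (RFC 6902) array that is sent with the application/json-patch+json content type. The sketch below builds an equivalent body with plain Jackson instead of the DSpace test helpers; the section names, metadata fields and values are taken from the test itself, while the class name and the use of ObjectMapper here are only illustrative.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.Map;

public class CorrectionPatchSketch {
    public static void main(String[] args) throws Exception {
        // Mirrors the operations built in the test: two replaces, one remove, one add.
        // Each metadata value is wrapped in a {"value": ...} object, matching the Map the test builds by hand.
        List<Map<String, Object>> ops = List.of(
            Map.of("op", "replace",
                   "path", "/sections/traditionalpageone/dc.date.issued/0",
                   "value", Map.of("value", "2020-02-21")),
            Map.of("op", "replace",
                   "path", "/sections/traditionalpageone/dc.title/0",
                   "value", Map.of("value", "New Title")),
            Map.of("op", "remove",
                   "path", "/sections/traditionalpagetwo/dc.subject/0"),
            Map.of("op", "add",
                   "path", "/sections/traditionalpagetwo/dc.description.abstract",
                   "value", List.of(Map.of("value", "New Description"))));
        // This is the JSON that gets PATCHed to /api/submission/workspaceitems/{id}
        System.out.println(new ObjectMapper().writeValueAsString(ops));
    }
}

In the test the equivalent payload is produced by getPatchContent(operations) and submitted with getClient(tokenSubmitter).perform(patch("/api/submission/workspaceitems/" + newWorkspaceItem.getID())).
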
    From 90bda15fc62d82e7b6d0f8d635203dbf791c0126 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Feb 2024 08:55:03 +0100 Subject: [PATCH 673/686] DSC-1458 fix test to consider virtual metadata --- .../service/BulkImportWorkbookBuilderIT.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java index 9de061db8f94..bef8ca45c09c 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkimport/service/BulkImportWorkbookBuilderIT.java @@ -209,7 +209,7 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { Item firstItem = getItemFromMessage(handler.getInfoMessages().get(7)); assertThat(firstItem, notNullValue()); - assertThat(firstItem.getMetadata(), hasSize(16)); + assertThat(firstItem.getMetadata(), hasSize(18)); assertThat(firstItem.getMetadata(), hasItems( with("dc.title", "Test Publication"), with("dc.date.issued", "2020/02/15"), @@ -219,7 +219,9 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { with("dc.subject", "Java", 1), with("dc.subject", "DSpace", 2), with("dc.contributor.author", "White, Walter", authorId, 600), - with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE))); + with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); assertThat(getItemBitstreamsByBundle(firstItem, "ORIGINAL"), contains( bitstreamWith("Bitstream 1", "First bitstream content"), @@ -227,7 +229,7 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { Item secondItem = getItemFromMessage(handler.getInfoMessages().get(10)); assertThat(secondItem, notNullValue()); - assertThat(secondItem.getMetadata(), hasSize(16)); + assertThat(secondItem.getMetadata(), hasSize(22)); assertThat(secondItem.getMetadata(), hasItems( with("dc.title", "Second Publication"), with("dc.date.issued", "2022/02/15"), @@ -237,7 +239,11 @@ public void testWorkbookBuildingFromItemDtos() throws Exception { with("dc.contributor.author", "Jesse Pinkman", jesse.getID().toString(), 600), with("dc.contributor.author", "Test User", testUser.getID().toString(), 1, 600), with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE), - with("oairecerif.author.affiliation", "Company", 1) + with("oairecerif.author.affiliation", "Company", 1), + with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE), + with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE) )); assertThat(getItemBitstreamsByBundle(secondItem, "ORIGINAL"), contains( From b86c0faccb63e50afb5017aa7b7aa27670deb2e2 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Feb 2024 09:04:43 +0100 Subject: [PATCH 674/686] DSC-1458 fix test to consider virtual metadata --- .../test/java/org/dspace/harvest/OAIHarvesterIT.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java b/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java index c180b77dc26e..b305ccc18061 100644 --- 
a/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java +++ b/dspace-api/src/test/java/org/dspace/harvest/OAIHarvesterIT.java @@ -769,7 +769,7 @@ public void testRunHarvestWithPublicationAndThenPerson() throws Exception { Item publication = publications.get(0); List values = publication.getMetadata(); - assertThat(values, hasSize(19)); + assertThat(values, hasSize(21)); assertThat(values, hasItems(with("dc.title", "Test Publication"))); assertThat(values, hasItems(with("dc.type", "Controlled Vocabulary for Resource Type Genres::text"))); @@ -780,6 +780,8 @@ public void testRunHarvestWithPublicationAndThenPerson() throws Exception { assertThat(values, hasItems(with("oaire.citation.endPage", "180"))); assertThat(values, hasItems(with("dc.identifier.doi", "10.1007/978-3-642-35233-1_18"))); assertThat(values, hasItems(with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItems(with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItems(with("cris.virtual.author-orcid", PLACEHOLDER_PARENT_METADATA_VALUE))); assertThat(values, hasItems(with("cris.sourceId", "test-harvest::3"))); assertThat(values, hasItems(with("dspace.entity.type", "Publication"))); @@ -859,7 +861,7 @@ public void testRunHarvestWithPersonAndThenPublication() throws Exception { Item person = findItemByOaiID("oai:test-harvest:Persons/123", personCollection); List values = person.getMetadata(); - assertThat(values, hasSize(14)); + assertThat(values, hasSize(12)); assertThat(values, hasItems(with("dc.title", "Manghi, Paolo"))); assertThat(values, hasItems(with("cris.sourceId", "test-harvest::123"))); assertThat(values, hasItems(with("dspace.entity.type", "Person"))); @@ -872,7 +874,7 @@ public void testRunHarvestWithPersonAndThenPublication() throws Exception { Item publication = findItemByOaiID("oai:test-harvest:Publications/3", collection); values = publication.getMetadata(); - assertThat(values, hasSize(19)); + assertThat(values, hasSize(21)); assertThat(values, hasItems(with("dc.title", "Test Publication"))); assertThat(values, hasItems(with("dc.type", "Controlled Vocabulary for Resource Type Genres::text"))); @@ -885,6 +887,8 @@ public void testRunHarvestWithPersonAndThenPublication() throws Exception { assertThat(values, hasItems(with("oairecerif.author.affiliation", PLACEHOLDER_PARENT_METADATA_VALUE))); assertThat(values, hasItems(with("cris.sourceId", "test-harvest::3"))); assertThat(values, hasItems(with("dspace.entity.type", "Publication"))); + assertThat(values, hasItems(with("cris.virtual.department", PLACEHOLDER_PARENT_METADATA_VALUE))); + assertThat(values, hasItems(with("cris.virtualsource.department", UUIDUtils.toString(person.getID())))); assertThat(values, hasItems(with("cris.virtual.author-orcid", "0000-0002-9079-5932"))); assertThat(values, hasItems(with("cris.virtualsource.author-orcid", UUIDUtils.toString(person.getID())))); From 62309bab1fb9164c051b659fcbf1b6f5b144a578 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Feb 2024 09:45:10 +0100 Subject: [PATCH 675/686] fix obvious typo --- dspace/config/spring/api/csl-citation.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/config/spring/api/csl-citation.xml b/dspace/config/spring/api/csl-citation.xml index 47186c214ca4..78bf43835f02 100644 --- a/dspace/config/spring/api/csl-citation.xml +++ b/dspace/config/spring/api/csl-citation.xml @@ -8,7 +8,7 @@ - + From 20c67d4289faa7f9c87634271918a5358a38376d Mon Sep 17 00:00:00 2001 From: 
Mattia Vianelli Date: Tue, 6 Feb 2024 09:50:39 +0100 Subject: [PATCH 676/686] DSC-1526 We now have a check for which if the workspace item is a correction request we check if it is submission correction or a simple submission --- .../app/util/SubmissionConfigReader.java | 35 ++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 2f591b6e7a8c..02d7429b4112 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -30,7 +30,11 @@ import org.dspace.core.Context; import org.dspace.discovery.SearchServiceException; import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.services.RequestService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.versioning.ItemCorrectionService; +import org.dspace.web.ContextUtil; +import org.springframework.beans.factory.annotation.Autowired; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; @@ -58,6 +62,13 @@ */ public class SubmissionConfigReader { + + @Autowired + private ItemCorrectionService itemCorrectionService; + + @Autowired + RequestService requestService; + /** * The ID of the default collection. Will never be the ID of a named * collection @@ -430,6 +441,21 @@ private void doNodes(Node n) throws SAXException, SearchServiceException, Submis } } + + private boolean isCorrectionItem(Item item) { + Context context = ContextUtil.obtainCurrentRequestContext(); + ItemCorrectionService itemCorrectionService = + DSpaceServicesFactory.getInstance().getServiceManager() + .getServicesByType(ItemCorrectionService.class) + .get(0); + try { + return itemCorrectionService.checkIfIsCorrectionItem(context, item); + } catch (Exception ex) { + log.error("An error occurs checking if the given item is a correction item.", ex); + return false; + } + } + /** * Process the submission-map section of the XML file. 
Each element looks * like: Extract @@ -764,6 +790,13 @@ public SubmissionConfig getSubmissionConfigByInProgressSubmission(InProgressSubm return getSubmissionConfigByName(submissionDefinition); } - return getSubmissionConfigByCollection(object.getCollection()); + if (isCorrectionItem(object.getItem())) { + return getCorrectionSubmissionConfigByCollection(object.getCollection()); + } else { + return getSubmissionConfigByCollection(object.getCollection()); + } + } + + } \ No newline at end of file From 786ca86252b7e0fff92918ad0587455dd0b305ef Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Feb 2024 20:04:25 +0100 Subject: [PATCH 677/686] DSC-1458 fix IT failures due to detached entity --- .../dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java index aaa3fb1c62b6..48516a840da6 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/layout/CrisLayoutTabRestRepositoryIT.java @@ -2146,6 +2146,8 @@ public void findByItemTabsWithHiddenRelationshipsTest() throws Exception { context.turnOffAuthorisationSystem(); + // reload the collection as we need to create an additional item in it + col1 = context.reloadEntity(col1); Item publication1 = ItemBuilder.createItem(context, col1) .withTitle("Title Of Item") From 482584827bca6ec97ea36420bd7ed14c1fc9da5f Mon Sep 17 00:00:00 2001 From: Mattia Vianelli Date: Wed, 7 Feb 2024 10:15:25 +0100 Subject: [PATCH 678/686] DSC-1526 Improved code and fixed checkstyle --- .../app/util/SubmissionConfigReader.java | 42 +++++++++---------- .../service/impl/ValidationServiceImpl.java | 3 +- 2 files changed, 23 insertions(+), 22 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 02d7429b4112..57e6a3fafcea 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -33,7 +33,6 @@ import org.dspace.services.RequestService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.versioning.ItemCorrectionService; -import org.dspace.web.ContextUtil; import org.springframework.beans.factory.annotation.Autowired; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; @@ -63,9 +62,6 @@ public class SubmissionConfigReader { - @Autowired - private ItemCorrectionService itemCorrectionService; - @Autowired RequestService requestService; @@ -129,6 +125,16 @@ public class SubmissionConfigReader { protected static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + /** + * itemCorrectionService instance, needed to retrieve the handle correctly + * item correction actions + * + */ + protected static final ItemCorrectionService itemCorrectionService = + DSpaceServicesFactory.getInstance().getServiceManager() + .getServicesByType(ItemCorrectionService.class) + .get(0); + /** * Load Submission Configuration from the * item-submission.xml configuration file @@ -442,19 +448,7 @@ private void doNodes(Node n) throws SAXException, SearchServiceException, Submis } - private boolean isCorrectionItem(Item item) { - 
Context context = ContextUtil.obtainCurrentRequestContext(); - ItemCorrectionService itemCorrectionService = - DSpaceServicesFactory.getInstance().getServiceManager() - .getServicesByType(ItemCorrectionService.class) - .get(0); - try { - return itemCorrectionService.checkIfIsCorrectionItem(context, item); - } catch (Exception ex) { - log.error("An error occurs checking if the given item is a correction item.", ex); - return false; - } - } + /** * Process the submission-map section of the XML file. Each element looks @@ -784,19 +778,25 @@ public List getCollectionsBySubmissionConfig(Context context, String return results; } - public SubmissionConfig getSubmissionConfigByInProgressSubmission(InProgressSubmission object) { + public SubmissionConfig getSubmissionConfigByInProgressSubmission(InProgressSubmission object, Context context) { if (object instanceof EditItem) { String submissionDefinition = ((EditItem) object).getMode().getSubmissionDefinition(); return getSubmissionConfigByName(submissionDefinition); } - if (isCorrectionItem(object.getItem())) { + if (isCorrectionItem(object.getItem(), context)) { return getCorrectionSubmissionConfigByCollection(object.getCollection()); } else { return getSubmissionConfigByCollection(object.getCollection()); } - } - + private boolean isCorrectionItem(Item item, Context context) { + try { + return itemCorrectionService.checkIfIsCorrectionItem(context, item); + } catch (Exception ex) { + log.error("An error occurs checking if the given item is a correction item.", ex); + return false; + } + } } \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/validation/service/impl/ValidationServiceImpl.java b/dspace-api/src/main/java/org/dspace/validation/service/impl/ValidationServiceImpl.java index b4c9b4bc4c1a..65bd0bf19452 100644 --- a/dspace-api/src/main/java/org/dspace/validation/service/impl/ValidationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/validation/service/impl/ValidationServiceImpl.java @@ -55,7 +55,8 @@ private void setup() throws SubmissionConfigReaderException { @Override public List validate(Context context, InProgressSubmission obj) { - SubmissionConfig submissionConfig = submissionConfigReader.getSubmissionConfigByInProgressSubmission(obj); + SubmissionConfig submissionConfig = submissionConfigReader + .getSubmissionConfigByInProgressSubmission(obj, context); List errors = new ArrayList(); From cec6d1a1518c841d9d3b286cc64c1d8f60f797e1 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Wed, 7 Feb 2024 19:20:52 +0100 Subject: [PATCH 679/686] Revert fix of the abstrct typo as it wasn't --- dspace/config/spring/api/csl-citation.xml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dspace/config/spring/api/csl-citation.xml b/dspace/config/spring/api/csl-citation.xml index 78bf43835f02..ce98c5a30f09 100644 --- a/dspace/config/spring/api/csl-citation.xml +++ b/dspace/config/spring/api/csl-citation.xml @@ -8,7 +8,9 @@ - + + From 98f58abe40c2bfc2cc2b7f7d2ef25e7c645a9d5c Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 11 Jan 2024 11:01:34 +0100 Subject: [PATCH 680/686] [DQ-26] Adds data-quality addon configuration --- .../service/impl/SolrDedupServiceImpl.java | 8 +- .../app/deduplication/utils/DedupUtils.java | 61 +++++---- .../utils/DuplicateInfoList.java | 34 ----- .../app/deduplication/utils/IDedupUtils.java | 79 +++++++++++ .../service/DeduplicationService.java | 11 +- .../DetectPotentialDuplicateValidator.java | 4 +- .../entities/merge-relationship-types.xml | 126 
++++++++++++++++++ dspace-server-webapp/pom.xml | 17 +++ .../DetectDuplicateAddPatchOperation.java | 12 +- .../step/DetectPotentialDuplicateStep.java | 8 +- dspace/config/dspace.cfg | 10 ++ .../entities/merge-relationship-types.xml | 126 ++++++++++++++++++ dspace/config/modules/authority.cfg | 11 ++ dspace/config/modules/rest.cfg | 4 + dspace/config/registries/dspace-types.xml | 8 ++ dspace/modules/additions/pom.xml | 49 ++++--- pom.xml | 21 +++ 17 files changed, 495 insertions(+), 94 deletions(-) delete mode 100644 dspace-api/src/main/java/org/dspace/app/deduplication/utils/DuplicateInfoList.java create mode 100644 dspace-api/src/main/java/org/dspace/app/deduplication/utils/IDedupUtils.java create mode 100644 dspace-api/src/test/data/dspaceFolder/config/entities/merge-relationship-types.xml create mode 100644 dspace/config/entities/merge-relationship-types.xml diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java index cc5c0f2bc861..e12f1100be10 100644 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/service/impl/SolrDedupServiceImpl.java @@ -43,8 +43,8 @@ import org.dspace.app.deduplication.service.DedupService; import org.dspace.app.deduplication.service.SearchDeduplication; import org.dspace.app.deduplication.service.SolrDedupServiceIndexPlugin; -import org.dspace.app.deduplication.utils.DedupUtils; import org.dspace.app.deduplication.utils.DuplicateItemInfo; +import org.dspace.app.deduplication.utils.IDedupUtils; import org.dspace.app.deduplication.utils.Signature; import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; @@ -174,7 +174,7 @@ public class SolrDedupServiceImpl implements DedupService { protected VersioningService versioningService; @Autowired(required = true) - protected DedupUtils dedupUtils; + protected IDedupUtils dedupUtils; /*** * Deduplication status @@ -750,8 +750,8 @@ private void setDuplicateDecision(Context context, Item item, UUID duplicatedIte private List findDuplicationWithDecisions(Context context, Item item) { try { return dedupUtils.getAdminDuplicateByIdAndType(context, item.getID(), item.getType()).stream() - .filter(duplication -> isNotEmpty(duplication.getDecisionTypes())) - .collect(Collectors.toList()); + .filter(duplication -> isNotEmpty(duplication.getDecisionTypes())) + .collect(Collectors.toList()); } catch (SQLException | SearchServiceException e) { throw new RuntimeException(e); } diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DedupUtils.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DedupUtils.java index 97bf4a334652..b4c29d8780c9 100644 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DedupUtils.java +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DedupUtils.java @@ -47,12 +47,15 @@ import org.dspace.services.ConfigurationService; import org.dspace.util.ItemUtils; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; /** * Utility class used to search for duplicates inside the dedup solr core. 
* */ -public class DedupUtils { + +@Service +public class DedupUtils implements IDedupUtils { private static Logger log = LogManager.getLogger(DedupUtils.class); @@ -64,11 +67,14 @@ public class DedupUtils { @Autowired(required = true) protected ConfigurationService configurationService; - public DuplicateInfoList findSignatureWithDuplicate(Context context, String signatureType, int resourceType, - int limit, int offset, int rule) throws SearchServiceException, SQLException { + @Override + public Collection findSignatureWithDuplicate(Context context, String signatureType, int resourceType, + int limit, int offset, int rule) + throws SearchServiceException, SQLException { return findPotentialMatch(context, signatureType, resourceType, limit, offset, rule); } + @Override public Map countSignaturesWithDuplicates(String query, int resourceTypeId) throws SearchServiceException { Map results = new HashMap(); @@ -113,6 +119,7 @@ public Map countSignaturesWithDuplicates(String query, int reso return results; } + @Override public Map countSuggestedDuplicate(String query, int resourceTypeId) throws SearchServiceException { Map results = new HashMap(); @@ -241,8 +248,9 @@ private boolean hasStoredDecision(UUID firstItemID, UUID secondItemID, Duplicate return !response.getResults().isEmpty(); } + @Override public boolean matchExist(Context context, UUID itemID, UUID targetItemID, Integer resourceType, - String signatureType, Boolean isInWorkflow) throws SQLException, SearchServiceException { + String signatureType, Boolean isInWorkflow) throws SQLException, SearchServiceException { boolean exist = false; List potentialDuplicates = findDuplicate(context, itemID, resourceType, null, isInWorkflow); for (DuplicateItemInfo match : potentialDuplicates) { @@ -256,6 +264,7 @@ public boolean matchExist(Context context, UUID itemID, UUID targetItemID, Integ } + @Override public boolean rejectAdminDups(Context context, UUID firstId, UUID secondId, Integer type) throws SQLException, AuthorizeException { if (firstId == secondId) { @@ -309,6 +318,7 @@ public boolean rejectAdminDups(Context context, UUID firstId, UUID secondId, Int * @throws AuthorizeException * @throws SearchServiceException */ + @Override public boolean rejectAdminDups(Context context, UUID itemID, String signatureType, int resourceType) throws SQLException, AuthorizeException, SearchServiceException { @@ -336,6 +346,7 @@ public boolean rejectAdminDups(Context context, UUID itemID, String signatureTyp } + @Override public void rejectAdminDups(Context context, List items, String signatureID) throws SQLException, AuthorizeException, SearchServiceException { for (DSpaceObject item : items) { @@ -343,8 +354,9 @@ public void rejectAdminDups(Context context, List items, String si } } + @Override public void verify(Context context, int dedupId, UUID firstId, UUID secondId, int type, boolean toFix, String note, - boolean check) throws SQLException, AuthorizeException { + boolean check) throws SQLException, AuthorizeException { UUID[] sortedIds = new UUID[] { firstId, secondId }; Arrays.sort(sortedIds); firstId = sortedIds[0]; @@ -417,8 +429,9 @@ private Deduplication retrieveDuplicationRow(Context context, UUID firstId, UUID return row; } + @Override public void setDuplicateDecision(Context context, UUID firstId, UUID secondId, Integer type, - DuplicateDecisionObjectRest decisionObject) + DuplicateDecisionObjectRest decisionObject) throws AuthorizeException, SQLException, SearchServiceException { if (hasAuthorization(context, firstId, secondId)) { @@ 
-478,6 +491,7 @@ public void setDuplicateDecision(Context context, UUID firstId, UUID secondId, I } } + @Override public boolean validateDecision(DuplicateDecisionObjectRest decisionObject) { boolean valid = false; @@ -500,8 +514,9 @@ public boolean validateDecision(DuplicateDecisionObjectRest decisionObject) { return valid; } + @Override public boolean rejectDups(Context context, UUID firstId, UUID secondId, Integer type, boolean notDupl, String note, - boolean check) throws SQLException { + boolean check) throws SQLException { UUID[] sortedIds = new UUID[] { firstId, secondId }; Arrays.sort(sortedIds); Deduplication row = null; @@ -547,11 +562,9 @@ public boolean rejectDups(Context context, UUID firstId, UUID secondId, Integer return false; } - private DuplicateInfoList findPotentialMatch(Context context, String signatureType, int resourceType, int start, + private List findPotentialMatch(Context context, String signatureType, int resourceType, int start, int rows, int rule) throws SearchServiceException, SQLException { - DuplicateInfoList dil = new DuplicateInfoList(); - if (StringUtils.isNotEmpty(signatureType)) { if (!StringUtils.contains(signatureType, "_signature")) { signatureType += "_signature"; @@ -594,7 +607,7 @@ private DuplicateInfoList findPotentialMatch(Context context, String signatureTy FacetField facetField = responseFacet.getFacetField(signatureType); - List result = new ArrayList(); + List result = new ArrayList<>(); int index = 0; for (Count facetHit : facetField.getValues()) { @@ -653,10 +666,7 @@ private DuplicateInfoList findPotentialMatch(Context context, String signatureTy } index++; } - - dil.setDsi(result); - dil.setSize(facetField.getValues().size()); - return dil; + return result; } private DuplicateSignatureInfo findPotentialMatchByID(Context context, String signatureType, int resourceType, @@ -699,38 +709,45 @@ private DuplicateSignatureInfo findPotentialMatchByID(Context context, String si return dsi; } + @Override public DedupService getDedupService() { return dedupService; } + @Override public void setDedupService(DedupService dedupService) { this.dedupService = dedupService; } + @Override public void commit() { dedupService.commit(); } + @Override public List getDuplicateByIDandType(Context context, UUID itemID, int typeID, - boolean isInWorkflow) throws SQLException, SearchServiceException { + boolean isInWorkflow) + throws SQLException, SearchServiceException { return getDuplicateByIdAndTypeAndSignatureType(context, itemID, typeID, null, isInWorkflow); } + @Override public List getDuplicateByIdAndTypeAndSignatureType(Context context, UUID itemID, int typeID, - String signatureType, boolean isInWorkflow) throws SQLException, SearchServiceException { + String signatureType, boolean isInWorkflow) + throws SQLException, SearchServiceException { return findDuplicate(context, itemID, typeID, signatureType, isInWorkflow); } + @Override public List getAdminDuplicateByIdAndType(Context context, UUID itemID, int typeID) throws SQLException, SearchServiceException { return findDuplicate(context, itemID, typeID, null, null); } - public DuplicateInfoList findSuggestedDuplicate(Context context, int resourceType, int start, int rows) + @Override + public List findSuggestedDuplicate(Context context, int resourceType, int start, int rows) throws SearchServiceException, SQLException { - DuplicateInfoList dil = new DuplicateInfoList(); - SolrQuery solrQueryInternal = new SolrQuery(); solrQueryInternal.setQuery(SolrDedupServiceImpl.SUBQUERY_NOT_IN_REJECTED); @@ -774,8 
+791,6 @@ public DuplicateInfoList findSuggestedDuplicate(Context context, int resourceTyp index++; } - dil.setDsi(result); - dil.setSize(solrDocumentList.getNumFound()); - return dil; + return result; } } diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DuplicateInfoList.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DuplicateInfoList.java deleted file mode 100644 index 3935944ffa77..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/DuplicateInfoList.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.deduplication.utils; - -import java.util.List; - -public class DuplicateInfoList { - - private long size; - - private List dsi; - - public long getSize() { - return size; - } - - public void setSize(long size) { - this.size = size; - } - - public List getDsi() { - return dsi; - } - - public void setDsi(List dsi) { - this.dsi = dsi; - } - -} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/deduplication/utils/IDedupUtils.java b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/IDedupUtils.java new file mode 100644 index 000000000000..774735eaac3a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/deduplication/utils/IDedupUtils.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.deduplication.utils; + +import java.sql.SQLException; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +import org.dspace.app.deduplication.model.DuplicateDecisionObjectRest; +import org.dspace.app.deduplication.service.DedupService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + **/ +public interface IDedupUtils { + Collection findSignatureWithDuplicate(Context context, String signatureType, int resourceType, + int limit, int offset, int rule) throws SearchServiceException, + SQLException; + + Map countSignaturesWithDuplicates(String query, int resourceTypeId) + throws SearchServiceException; + + Map countSuggestedDuplicate(String query, int resourceTypeId) + throws SearchServiceException; + + boolean matchExist(Context context, UUID itemID, UUID targetItemID, Integer resourceType, + String signatureType, Boolean isInWorkflow) throws SQLException, SearchServiceException; + + boolean rejectAdminDups(Context context, UUID firstId, UUID secondId, Integer type) + throws SQLException, AuthorizeException; + + boolean rejectAdminDups(Context context, UUID itemID, String signatureType, int resourceType) + throws SQLException, AuthorizeException, SearchServiceException; + + void rejectAdminDups(Context context, List items, String signatureID) + throws SQLException, AuthorizeException, SearchServiceException; + + void verify(Context context, int dedupId, UUID firstId, UUID secondId, int type, boolean toFix, String note, + boolean check) throws SQLException, AuthorizeException; + + void 
setDuplicateDecision(Context context, UUID firstId, UUID secondId, Integer type, + DuplicateDecisionObjectRest decisionObject) + throws AuthorizeException, SQLException, SearchServiceException; + + boolean validateDecision(DuplicateDecisionObjectRest decisionObject); + + boolean rejectDups(Context context, UUID firstId, UUID secondId, Integer type, boolean notDupl, String note, + boolean check) throws SQLException; + + DedupService getDedupService(); + + void setDedupService(DedupService dedupService); + + void commit(); + + List getDuplicateByIDandType(Context context, UUID itemID, int typeID, + boolean isInWorkflow) throws SQLException, SearchServiceException; + + List getDuplicateByIdAndTypeAndSignatureType(Context context, UUID itemID, int typeID, + String signatureType, boolean isInWorkflow) + throws SQLException, SearchServiceException; + + List getAdminDuplicateByIdAndType(Context context, UUID itemID, int typeID) + throws SQLException, SearchServiceException; + + Collection findSuggestedDuplicate(Context context, int resourceType, int start, int rows) + throws SearchServiceException, SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/deduplication/service/DeduplicationService.java b/dspace-api/src/main/java/org/dspace/deduplication/service/DeduplicationService.java index ab36dc46b4c1..b2826998cccc 100644 --- a/dspace-api/src/main/java/org/dspace/deduplication/service/DeduplicationService.java +++ b/dspace-api/src/main/java/org/dspace/deduplication/service/DeduplicationService.java @@ -15,6 +15,7 @@ import org.dspace.deduplication.Deduplication; public interface DeduplicationService { + /** * Create a new Deduplication object * @@ -23,7 +24,7 @@ public interface DeduplicationService { * @throws SQLException An exception that provides information on a database * access error or other errors. */ - public Deduplication create(Context context, Deduplication dedup) throws SQLException; + Deduplication create(Context context, Deduplication dedup) throws SQLException; /*** * Return all deduplication objects @@ -35,7 +36,7 @@ public interface DeduplicationService { * @throws SQLException An exception that provides information on a database * access error or other errors. */ - public List findAll(Context context, int pageSize, int offset) throws SQLException; + List findAll(Context context, int pageSize, int offset) throws SQLException; /** * Count all accounts. @@ -55,11 +56,11 @@ public interface DeduplicationService { * @throws SQLException An exception that provides information on a database * access error or other errors. 
*/ - public void update(Context context, Deduplication dedup) throws SQLException; + void update(Context context, Deduplication dedup) throws SQLException; - public List getDeduplicationByFirstAndSecond(Context context, UUID firstId, UUID secondId) + List getDeduplicationByFirstAndSecond(Context context, UUID firstId, UUID secondId) throws SQLException; - public Deduplication uniqueDeduplicationByFirstAndSecond(Context context, UUID firstId, UUID secondId) + Deduplication uniqueDeduplicationByFirstAndSecond(Context context, UUID firstId, UUID secondId) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/validation/DetectPotentialDuplicateValidator.java b/dspace-api/src/main/java/org/dspace/validation/DetectPotentialDuplicateValidator.java index 8a9a7aba10bd..4b4e237a3772 100644 --- a/dspace-api/src/main/java/org/dspace/validation/DetectPotentialDuplicateValidator.java +++ b/dspace-api/src/main/java/org/dspace/validation/DetectPotentialDuplicateValidator.java @@ -21,8 +21,8 @@ import org.dspace.app.deduplication.model.DuplicateDecisionType; import org.dspace.app.deduplication.model.DuplicateDecisionValue; -import org.dspace.app.deduplication.utils.DedupUtils; import org.dspace.app.deduplication.utils.DuplicateItemInfo; +import org.dspace.app.deduplication.utils.IDedupUtils; import org.dspace.app.util.SubmissionStepConfig; import org.dspace.content.DSpaceObject; import org.dspace.content.InProgressSubmission; @@ -47,7 +47,7 @@ public class DetectPotentialDuplicateValidator implements SubmissionStepValidato private static final String ERROR_VALIDATION_DUPLICATION = "error.validation.detect-duplicate"; @Autowired - private DedupUtils dedupUtils; + private IDedupUtils dedupUtils; @Autowired private ItemService itemService; diff --git a/dspace-api/src/test/data/dspaceFolder/config/entities/merge-relationship-types.xml b/dspace-api/src/test/data/dspaceFolder/config/entities/merge-relationship-types.xml new file mode 100644 index 000000000000..8db947319542 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/entities/merge-relationship-types.xml @@ -0,0 +1,126 @@ + + + + + + Project + Project + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Person + Person + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Funding + Funding + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + OrgUnit + OrgUnit + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Journal + Journal + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Publication + Publication + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Product + Product + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Patent + Patent + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Event + Event + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Equipment + Equipment + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + \ No newline at end of file diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index 0a0b394d576a..c83aa538698f 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -350,6 +350,23 @@ + + + addon-dataquality + + + dq.on + + + + + it.4science.dspace + addon-dataquality + ${addon-dataquality.version} + jar + + + diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/DetectDuplicateAddPatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/DetectDuplicateAddPatchOperation.java index 4561a8a9c807..819bba0c1423 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/DetectDuplicateAddPatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/DetectDuplicateAddPatchOperation.java @@ -12,7 +12,7 @@ import org.dspace.app.deduplication.model.DuplicateDecisionObjectRest; import org.dspace.app.deduplication.model.DuplicateDecisionType; -import org.dspace.app.deduplication.utils.DedupUtils; +import org.dspace.app.deduplication.utils.IDedupUtils; import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.patch.LateObjectEvaluator; import org.dspace.content.InProgressSubmission; @@ -43,7 +43,7 @@ void add(Context context, HttpServletRequest currentRequest, InProgressSubmissio String.format("The specified path '%s' is not valid", getAbsolutePath(path))); } - DedupUtils dedupUtils = new DSpace().getServiceManager().getServiceByName("dedupUtils", DedupUtils.class); + IDedupUtils IDedupUtils = new DSpace().getServiceManager().getServiceByName("dedupUtils", IDedupUtils.class); DuplicateDecisionObjectRest decisionObject = evaluateSingleObject((LateObjectEvaluator) value); UUID currentItemID = source.getItem().getID(); @@ -98,7 +98,7 @@ void add(Context context, HttpServletRequest currentRequest, InProgressSubmissio // generate UnprocessableEntityException if decisionObject is invalid try { - if (!dedupUtils.validateDecision(decisionObject)) { + if (!IDedupUtils.validateDecision(decisionObject)) { throw new UnprocessableEntityException( String.format("The specified decision %s is not valid", decisionObject.getValue())); } @@ -106,13 +106,13 @@ void add(Context context, HttpServletRequest currentRequest, InProgressSubmissio throw new UnprocessableEntityException(String.format("The specified decision %s is not valid", subPath)); } - if (!dedupUtils.matchExist(context, currentItemID, duplicateItemID, resourceType, null, isInWorkflow)) { + if (!IDedupUtils.matchExist(context, currentItemID, duplicateItemID, resourceType, null, isInWorkflow)) { throw new UnprocessableEntityException( String.format("Cannot find any duplicate match related to Item %s", duplicateItemID)); } - dedupUtils.setDuplicateDecision(context, source.getItem().getID(), duplicateItemID, source.getItem().getType(), - decisionObject); + IDedupUtils.setDuplicateDecision(context, source.getItem().getID(), duplicateItemID, source.getItem().getType(), + decisionObject); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DetectPotentialDuplicateStep.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DetectPotentialDuplicateStep.java index d7ad62153bcc..90f72afe7f07 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DetectPotentialDuplicateStep.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DetectPotentialDuplicateStep.java @@ -15,8 +15,8 @@ import javax.servlet.http.HttpServletRequest; import org.dspace.app.deduplication.model.DuplicateDecisionType; -import org.dspace.app.deduplication.utils.DedupUtils; import org.dspace.app.deduplication.utils.DuplicateItemInfo; +import org.dspace.app.deduplication.utils.IDedupUtils; import org.dspace.app.rest.converter.factory.ConverterServiceFactoryImpl; import org.dspace.app.rest.model.MetadataValueRest; import org.dspace.app.rest.model.patch.Operation; @@ -54,14 +54,14 @@ public class DetectPotentialDuplicateStep extends AbstractProcessingStep { public DataDetectDuplicate getData(SubmissionService 
submissionService, InProgressSubmission obj, SubmissionStepConfig config) throws Exception { - DedupUtils dedupUtils = new DSpace().getServiceManager().getServiceByName("dedupUtils", DedupUtils.class); + IDedupUtils IDedupUtils = new DSpace().getServiceManager().getServiceByName("dedupUtils", IDedupUtils.class); UUID itemID = obj.getItem().getID(); int typeID = obj.getItem().getType(); boolean check = !(obj instanceof WorkspaceItem); - List potentialDuplicates = dedupUtils.getDuplicateByIDandType(getContext(), itemID, typeID, - check); + List potentialDuplicates = IDedupUtils.getDuplicateByIDandType(getContext(), itemID, typeID, + check); Map matches = processPotentialDuplicates(itemID, check, potentialDuplicates); DataDetectDuplicate result = new DataDetectDuplicate(); diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 05ebdd660679..477d14e207e2 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1926,6 +1926,16 @@ bulk-export.limit.notLoggedIn = 0 # has 2 threads on which schedule events system-event.thread.size = 2 +#------------------------------------------------------------------# +#------------DEDUPLICATION / DATAQUALITY CONFIGURATIONS------------# +#------------------------------------------------------------------# +# # +# Configurations for the Deduplication / DataQuality features # +# # +#------------------------------------------------------------------# +# metadata here listed will be excluded by merge tool logic +merge.excluded-metadata = dc.description.provenance + # Load default module configs # ---------------------------- # To exclude a module configuration, simply comment out its "include" statement. diff --git a/dspace/config/entities/merge-relationship-types.xml b/dspace/config/entities/merge-relationship-types.xml new file mode 100644 index 000000000000..8db947319542 --- /dev/null +++ b/dspace/config/entities/merge-relationship-types.xml @@ -0,0 +1,126 @@ + + + + + + Project + Project + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Person + Person + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Funding + Funding + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + OrgUnit + OrgUnit + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Journal + Journal + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Publication + Publication + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Product + Product + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Patent + Patent + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Event + Event + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + Equipment + Equipment + isMergedFromItem + isMergedInItem + + 0 + + + 0 + + + + \ No newline at end of file diff --git a/dspace/config/modules/authority.cfg b/dspace/config/modules/authority.cfg index 037cf01e5d79..ecc9e858df88 100644 --- a/dspace/config/modules/authority.cfg +++ b/dspace/config/modules/authority.cfg @@ -280,3 +280,14 @@ choices.plugin.dc.type = ControlledVocabularyAuthority # DSpace-CRIS stores by default the authority of controlled vocabularies vocabulary.plugin.authority.store = true + +#------------------------------------------------------------------# +#------------DEDUPLICATION / DATAQUALITY CONFIGURATIONS------------# +#------------------------------------------------------------------# +org.dspace.content.authority.ItemAuthority = OrgUnitAuthority +# AuthorStrictMatchAuthority configuration +cris.ItemAuthority.AuthorStrictMatchAuthority.forceInternalName = false +# 
AuthorCoarseMatchAuthority configuration +cris.ItemAuthority.AuthorCoarseMatchAuthority.forceInternalName = false +choices.plugin.green.override.dc.contributor.author = AuthorStrictMatchAuthority +choices.plugin.orange.override.dc.contributor.author = AuthorCoarseMatchAuthority \ No newline at end of file diff --git a/dspace/config/modules/rest.cfg b/dspace/config/modules/rest.cfg index ab28cd13cc25..faf7b248046b 100644 --- a/dspace/config/modules/rest.cfg +++ b/dspace/config/modules/rest.cfg @@ -95,6 +95,10 @@ rest.properties.exposed = identifiers.item-status.register-doi rest.properties.exposed = authentication-password.domain.valid rest.properties.exposed = request.item.type rest.properties.exposed = handle.canonical.prefix +#------------------------------------------------------------------# +#------------DEDUPLICATION / DATAQUALITY CONFIGURATIONS------------# +#------------------------------------------------------------------# +rest.properties.exposed = merge.excluded-metadata #---------------------------------------------------------------# # These configs are used by the deprecated REST (v4-6) module # diff --git a/dspace/config/registries/dspace-types.xml b/dspace/config/registries/dspace-types.xml index 861dc67a816a..2cc899dd0cda 100644 --- a/dspace/config/registries/dspace-types.xml +++ b/dspace/config/registries/dspace-types.xml @@ -122,4 +122,12 @@ + + + dspace + merge + target-uri + stores the value of uri of target item + + diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 7de65e9ca49e..f5ae804f8b8a 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -249,22 +249,39 @@ - - addon-analytics - - - analytics.on - - - - - it.4science.dspace - addon-analytics-api - ${addon-analytics.version} - jar - - - + + addon-analytics + + + analytics.on + + + + + it.4science.dspace + addon-analytics-api + ${addon-analytics.version} + jar + + + + + + addon-dataquality + + + dq.on + + + + + it.4science.dspace + addon-dataquality + ${addon-dataquality.version} + jar + + + diff --git a/pom.xml b/pom.xml index 09b01d200b50..c7e823636c09 100644 --- a/pom.xml +++ b/pom.xml @@ -65,6 +65,7 @@ [CRIS-7.1-SNAPSHOT,CRIS-8.0-SNAPSHOT) [CRIS-7.0-SNAPSHOT,CRIS-8.0-SNAPSHOT) [CRIS-7.0-SNAPSHOT,CRIS-8.0-SNAPSHOT) + cris-2023.02.00 UTF-8 @@ -939,6 +940,26 @@ + + + addon-dataquality + + false + + + + + it.4science.dspace + addon-dataquality + ${addon-dataquality.version} + jar + + + + + From 8d73d806644711eca5d8a89cb013a8215b78b477 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Thu, 8 Feb 2024 17:14:27 +0100 Subject: [PATCH 681/686] [DQ-26] Refactors configurations --- dspace/config/dspace.cfg | 10 ---------- dspace/config/modules/authority.cfg | 13 +------------ dspace/config/modules/deduplication.cfg | 10 ++++++++++ pom.xml | 2 +- 4 files changed, 12 insertions(+), 23 deletions(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 477d14e207e2..05ebdd660679 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1926,16 +1926,6 @@ bulk-export.limit.notLoggedIn = 0 # has 2 threads on which schedule events system-event.thread.size = 2 -#------------------------------------------------------------------# -#------------DEDUPLICATION / DATAQUALITY CONFIGURATIONS------------# -#------------------------------------------------------------------# -# # -# Configurations for the Deduplication / DataQuality features # -# # -#------------------------------------------------------------------# -# metadata here 
listed will be excluded by merge tool logic -merge.excluded-metadata = dc.description.provenance - # Load default module configs # ---------------------------- # To exclude a module configuration, simply comment out its "include" statement. diff --git a/dspace/config/modules/authority.cfg b/dspace/config/modules/authority.cfg index ecc9e858df88..86d0248060e5 100644 --- a/dspace/config/modules/authority.cfg +++ b/dspace/config/modules/authority.cfg @@ -279,15 +279,4 @@ authority.controlled.dc.type = true choices.plugin.dc.type = ControlledVocabularyAuthority # DSpace-CRIS stores by default the authority of controlled vocabularies -vocabulary.plugin.authority.store = true - -#------------------------------------------------------------------# -#------------DEDUPLICATION / DATAQUALITY CONFIGURATIONS------------# -#------------------------------------------------------------------# -org.dspace.content.authority.ItemAuthority = OrgUnitAuthority -# AuthorStrictMatchAuthority configuration -cris.ItemAuthority.AuthorStrictMatchAuthority.forceInternalName = false -# AuthorCoarseMatchAuthority configuration -cris.ItemAuthority.AuthorCoarseMatchAuthority.forceInternalName = false -choices.plugin.green.override.dc.contributor.author = AuthorStrictMatchAuthority -choices.plugin.orange.override.dc.contributor.author = AuthorCoarseMatchAuthority \ No newline at end of file +vocabulary.plugin.authority.store = true \ No newline at end of file diff --git a/dspace/config/modules/deduplication.cfg b/dspace/config/modules/deduplication.cfg index 71aa5c8840d7..c2f42dc2902f 100644 --- a/dspace/config/modules/deduplication.cfg +++ b/dspace/config/modules/deduplication.cfg @@ -27,3 +27,13 @@ deduplication.tool.duplicatechecker.ignorewithdrawn = true # only reported section don't check submitter suggestion duplicate deduplication.tool.duplicatechecker.ignore.submitter.suggestion = true + +#------------------------------------------------------------------# +#------------DEDUPLICATION / DATAQUALITY CONFIGURATIONS------------# +#------------------------------------------------------------------# +# # +# Configurations for the Deduplication / DataQuality features # +# # +#------------------------------------------------------------------# +# metadata here listed will be excluded by merge tool logic +merge.excluded-metadata = dc.description.provenance diff --git a/pom.xml b/pom.xml index c7e823636c09..8ec60b3a99df 100644 --- a/pom.xml +++ b/pom.xml @@ -65,7 +65,7 @@ [CRIS-7.1-SNAPSHOT,CRIS-8.0-SNAPSHOT) [CRIS-7.0-SNAPSHOT,CRIS-8.0-SNAPSHOT) [CRIS-7.0-SNAPSHOT,CRIS-8.0-SNAPSHOT) - cris-2023.02.00 + [CRIS-2023.02-SNAPSHOT,CRIS-2023.03-SNAPSHOT) UTF-8 From f3e266878b1a82b7eb70f2b3bf745080afe43bf4 Mon Sep 17 00:00:00 2001 From: Vincenzo Mecca Date: Fri, 9 Feb 2024 13:22:52 +0100 Subject: [PATCH 682/686] [DQ-26] Addresses changes for a new dataquality-types file --- dspace/config/dspace.cfg | 1 + .../config/registries/dataquality-types.xml | 19 +++++++++++++++++++ dspace/config/registries/dspace-types.xml | 8 -------- 3 files changed, 20 insertions(+), 8 deletions(-) create mode 100644 dspace/config/registries/dataquality-types.xml diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 05ebdd660679..98a7aba0aa5c 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1011,6 +1011,7 @@ registry.metadata.load = openaire4-types.xml registry.metadata.load = dspace-types.xml registry.metadata.load = iiif-types.xml registry.metadata.load = bitstream-types.xml +registry.metadata.load = 
dataquality-types.xml #---------------------------------------------------------------# #-----------------UI-Related CONFIGURATIONS---------------------# diff --git a/dspace/config/registries/dataquality-types.xml b/dspace/config/registries/dataquality-types.xml new file mode 100644 index 000000000000..2658d701dc29 --- /dev/null +++ b/dspace/config/registries/dataquality-types.xml @@ -0,0 +1,19 @@ + + + + DataQuality Addon metadata types + + + + dq + http://dspace.org/dq + + + + dq + merge + target-uri + stores the value of uri of target item + + + \ No newline at end of file diff --git a/dspace/config/registries/dspace-types.xml b/dspace/config/registries/dspace-types.xml index 2cc899dd0cda..861dc67a816a 100644 --- a/dspace/config/registries/dspace-types.xml +++ b/dspace/config/registries/dspace-types.xml @@ -122,12 +122,4 @@ - - - dspace - merge - target-uri - stores the value of uri of target item - - From db85f99920876aa8dcd815d300f81de20d47444c Mon Sep 17 00:00:00 2001 From: "aliaksei.bykau" Date: Fri, 9 Feb 2024 13:55:31 +0100 Subject: [PATCH 683/686] =?UTF-8?q?[CST-13510]=20Update=20the=20registry-l?= =?UTF-8?q?oader=20script=20and=20related=20classes.=20Added=20new=20scrip?= =?UTF-8?q?t=20argument=20=E2=80=9C-all=E2=80=9D=20that=20will=20run=20imp?= =?UTF-8?q?ort=20of=20all=20metadata=20types=20and=20bitstream=20formats.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../dspace/administer/MetadataImporter.java | 35 ++++++++++------- .../org/dspace/administer/RegistryLoader.java | 39 ++++++++++++++++--- .../dspace/storage/rdbms/RegistryUpdater.java | 30 +++++++------- 3 files changed, 68 insertions(+), 36 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java index 501d86af45f8..85f084b03cd0 100644 --- a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java +++ b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java @@ -11,7 +11,6 @@ import java.io.IOException; import java.sql.SQLException; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import javax.xml.parsers.ParserConfigurationException; @@ -35,6 +34,8 @@ import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataSchemaService; import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; @@ -66,10 +67,18 @@ * } */ public class MetadataImporter { + public static final String BASE = DSpaceServicesFactory.getInstance() + .getConfigurationService().getProperty("dspace.dir") + File.separator + "config" + File.separator + + "registries" + File.separator; + public static final String REGISTRY_METADATA_PROPERTY = "registry.metadata.load"; + public static final String REGISTRY_BITSTREAM_FORMAT_PROPERTY = "registry.bitstream-formats.load"; + protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance() .getMetadataSchemaService(); protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance() .getMetadataFieldService(); + protected static ConfigurationService configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); /** * logging category @@ -117,25 +126,21 @@ public static void 
main(String[] args) loadRegistry(file, forceUpdate); } else { boolean forceUpdate = line.hasOption('u'); - for (String file : getAllRegistryFiles()) { + for (String file : getAllRegistryFiles(REGISTRY_METADATA_PROPERTY)) { loadRegistry(file, forceUpdate); } } } - public static List getAllRegistryFiles() { - File folder = new File("config/registries"); - - if (folder.exists() && folder.isDirectory()) { - File[] files = folder.listFiles((dir, name) -> name.toLowerCase().endsWith(".xml")); - - if (files != null) { - return Arrays.stream(files) - .map(file -> "config/registries/" + file.getName()) - .collect(Collectors.toList()); - } - } - return Collections.emptyList(); + /** + * Load all registry file names from config + * + * @param propertyName + * @return list of all registry files + */ + public static List getAllRegistryFiles(String propertyName) { + List files = Arrays.asList(configurationService.getArrayProperty(propertyName)); + return files.stream().map(file -> BASE + file).collect(Collectors.toList()); } /** diff --git a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java index ba156cafc89e..7db0cc8d8936 100644 --- a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java +++ b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java @@ -70,7 +70,7 @@ private RegistryLoader() { } */ public static void main(String[] argv) throws Exception { String usage = "Usage: " + RegistryLoader.class.getName() - + " (-bitstream | -metadata) registry-file.xml"; + + " (-bitstream | -metadata | -all) registry-file.xml"; Context context = null; @@ -83,16 +83,21 @@ public static void main(String[] argv) throws Exception { // Work out what we're loading if (argv[0].equalsIgnoreCase("-bitstream")) { - RegistryLoader.loadBitstreamFormats(context, argv[1]); + if (argv.length == 1) { + loadAllBitstreamFormats(context); + } else { + RegistryLoader.loadBitstreamFormats(context, argv[1]); + } } else if (argv[0].equalsIgnoreCase("-metadata")) { // Call MetadataImporter, as it handles Metadata schema updates if (argv.length == 1) { - for (String file : MetadataImporter.getAllRegistryFiles()) { - MetadataImporter.loadRegistry(file, true); - } + loadAllRegistry(); } else { MetadataImporter.loadRegistry(argv[1], true); } + } else if (argv[0].equalsIgnoreCase("-all")) { + loadAllBitstreamFormats(context); + loadAllRegistry(); } else { System.err.println(usage); } @@ -119,6 +124,30 @@ public static void main(String[] argv) throws Exception { } } + + /** + * Load all bitstream formats from configuration properties + * + * @param context + * @throws Exception + */ + private static void loadAllBitstreamFormats(Context context) throws Exception { + for (String file : MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_BITSTREAM_FORMAT_PROPERTY)) { + RegistryLoader.loadBitstreamFormats(context, file); + } + } + + /** + * Load all metadata registry from configuration properties + * + * @throws Exception + */ + private static void loadAllRegistry() throws Exception { + for (String file : MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_METADATA_PROPERTY)) { + MetadataImporter.loadRegistry(file, true); + } + } + /** * Load Bitstream Format metadata * diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java index 6a1d71b9e656..1d9948690d86 100644 --- 
a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java @@ -7,9 +7,9 @@ */ package org.dspace.storage.rdbms; -import java.io.File; import java.io.IOException; import java.sql.SQLException; +import java.util.List; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.xpath.XPathExpressionException; @@ -20,8 +20,6 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.content.NonUniqueMetadataException; import org.dspace.core.Context; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; import org.flywaydb.core.api.callback.Callback; import org.flywaydb.core.api.callback.Event; import org.slf4j.Logger; @@ -58,31 +56,31 @@ public class RegistryUpdater implements Callback { * Method to actually update our registries from latest configuration files. */ private void updateRegistries() { - ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); Context context = null; try { context = new Context(); context.turnOffAuthorisationSystem(); - String base = config.getProperty("dspace.dir") - + File.separator + "config" + File.separator - + "registries" + File.separator; - - // Load updates to Bitstream format registry (if any) - String bitstreamFormat = config.getProperty("registry.bitstream-formats.load"); - log.info("Updating Bitstream Format Registry based on {}{}", base, bitstreamFormat); - RegistryLoader.loadBitstreamFormats(context, base + bitstreamFormat); + // Load updates to Bitstream formats registries (if any) + List RegistryMetadataFiles = + MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_METADATA_PROPERTY); + for (String bitstreamFormat : RegistryMetadataFiles) { + log.info("Updating Bitstream Format Registry based on {}", bitstreamFormat); + RegistryLoader.loadBitstreamFormats(context, bitstreamFormat); + } // Load updates to Metadata schema registries (if any) - log.info("Updating Metadata Registries based on metadata type configs in {}", base); - for (String namespaceFile: config.getArrayProperty("registry.metadata.load")) { + List RegistryBitstreamFormatFiles = + MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_BITSTREAM_FORMAT_PROPERTY); + log.info("Updating Metadata Registries based on metadata type configs in {}", MetadataImporter.BASE); + for (String namespaceFile: RegistryBitstreamFormatFiles) { log.info("Reading {}", namespaceFile); - MetadataImporter.loadRegistry(base + namespaceFile, true); + MetadataImporter.loadRegistry(namespaceFile, true); } String workflowTypes = "workflow-types.xml"; log.info("Reading {}", workflowTypes); - MetadataImporter.loadRegistry(base + workflowTypes, true); + MetadataImporter.loadRegistry( MetadataImporter.BASE + workflowTypes, true); context.restoreAuthSystemState(); // Commit changes and close context From 13aa12074f6d881adc3d00ec417ee0dfd9836862 Mon Sep 17 00:00:00 2001 From: "aliaksei.bykau" Date: Fri, 9 Feb 2024 13:57:07 +0100 Subject: [PATCH 684/686] [CST-13510] checkstyle --- .../src/main/java/org/dspace/administer/MetadataImporter.java | 2 +- .../src/main/java/org/dspace/administer/RegistryLoader.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java index 85f084b03cd0..7a1aaa782787 100644 
--- a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java +++ b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java @@ -135,7 +135,7 @@ public static void main(String[] args) /** * Load all registry file names from config * - * @param propertyName + * @param propertyName name of the property that used in config * @return list of all registry files */ public static List getAllRegistryFiles(String propertyName) { diff --git a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java index 7db0cc8d8936..37876c587ee7 100644 --- a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java +++ b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java @@ -128,7 +128,7 @@ public static void main(String[] argv) throws Exception { /** * Load all bitstream formats from configuration properties * - * @param context + * @param context DSpace context object * @throws Exception */ private static void loadAllBitstreamFormats(Context context) throws Exception { From e666ff85b2aeb2baf0bfc05882310415807bf59e Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Fri, 9 Feb 2024 16:53:25 +0100 Subject: [PATCH 685/686] DSC-1457 fix inverted list of registries (metadata, bitstream) --- .../dspace/storage/rdbms/RegistryUpdater.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java index 1d9948690d86..d6577dc7e19d 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java @@ -62,20 +62,20 @@ private void updateRegistries() { context.turnOffAuthorisationSystem(); // Load updates to Bitstream formats registries (if any) - List RegistryMetadataFiles = - MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_METADATA_PROPERTY); - for (String bitstreamFormat : RegistryMetadataFiles) { + List registryBitstreamFormatFiles = + MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_BITSTREAM_FORMAT_PROPERTY); + for (String bitstreamFormat : registryBitstreamFormatFiles) { log.info("Updating Bitstream Format Registry based on {}", bitstreamFormat); RegistryLoader.loadBitstreamFormats(context, bitstreamFormat); } // Load updates to Metadata schema registries (if any) - List RegistryBitstreamFormatFiles = - MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_BITSTREAM_FORMAT_PROPERTY); + List registryMetadataFiles = + MetadataImporter.getAllRegistryFiles(MetadataImporter.REGISTRY_METADATA_PROPERTY); log.info("Updating Metadata Registries based on metadata type configs in {}", MetadataImporter.BASE); - for (String namespaceFile: RegistryBitstreamFormatFiles) { - log.info("Reading {}", namespaceFile); - MetadataImporter.loadRegistry(namespaceFile, true); + for (String metadataFile : registryMetadataFiles) { + log.info("Reading {}", metadataFile); + MetadataImporter.loadRegistry(metadataFile, true); } String workflowTypes = "workflow-types.xml"; From 81035478f3a7eaca3157b50ab4ef9b879b567356 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Fri, 9 Feb 2024 17:51:16 +0100 Subject: [PATCH 686/686] [maven-release-plugin] prepare release dspace-cris-2023.02.02 --- dspace-api/pom.xml | 2 +- dspace-iiif/pom.xml | 2 +- dspace-oai/pom.xml | 2 +- dspace-rdf/pom.xml | 2 +- dspace-rest/pom.xml | 4 ++-- 
dspace-server-webapp/pom.xml | 2 +- dspace-services/pom.xml | 2 +- dspace-sword/pom.xml | 2 +- dspace-swordv2/pom.xml | 2 +- dspace/modules/additions/pom.xml | 2 +- dspace/modules/pom.xml | 2 +- dspace/modules/rest/pom.xml | 2 +- dspace/modules/server-boot/pom.xml | 2 +- dspace/modules/server/pom.xml | 2 +- dspace/pom.xml | 2 +- pom.xml | 30 +++++++++++++++--------------- 16 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index 1c6879a5d701..a7d1fa104edd 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index 3b56ba53e832..f6846b955fbb 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 76718f44ba3c..a3ab33551c0d 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index eb63a67e4579..83af00bc343f 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index 257d0b3a91f8..a77c960283e7 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,7 +12,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index c83aa538698f..a26174341efe 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -14,7 +14,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 39e2ccf36f38..8b67a90a8ea1 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index 10a44f3615f6..803eba859119 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index e76bfca65b9f..1c8103016e5d 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -13,7 +13,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index f5ae804f8b8a..db2f30166a7d 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -17,7 +17,7 @@ org.dspace modules - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. 
diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml index 00f691235bc4..5a4426dddfd0 100644 --- a/dspace/modules/pom.xml +++ b/dspace/modules/pom.xml @@ -11,7 +11,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 ../../pom.xml diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index 4dfa2939bf90..8b80b4021668 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -13,7 +13,7 @@ org.dspace modules - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace/modules/server-boot/pom.xml b/dspace/modules/server-boot/pom.xml index ee23c8ee7bc7..90ceff73554a 100644 --- a/dspace/modules/server-boot/pom.xml +++ b/dspace/modules/server-boot/pom.xml @@ -11,7 +11,7 @@ modules org.dspace - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index 3797e809dca5..0d26cdb5fa11 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -7,7 +7,7 @@ modules org.dspace - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 .. diff --git a/dspace/pom.xml b/dspace/pom.xml index 85b98dbb10d5..b8cd8ad6c379 100644 --- a/dspace/pom.xml +++ b/dspace/pom.xml @@ -16,7 +16,7 @@ org.dspace dspace-parent - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 ../pom.xml diff --git a/pom.xml b/pom.xml index 8ec60b3a99df..c7b81d3eeb35 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.dspace dspace-parent pom - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 DSpace Parent Project DSpace-CRIS is an open source extension of DSpace (http://www.dspace.org) providing out of box support for the CRIS / RIMS and moder Institution Repository use cases with advanced features and optimized configurations @@ -979,14 +979,14 @@ org.dspace dspace-rest - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 jar classes org.dspace dspace-rest - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 war @@ -1137,62 +1137,62 @@ org.dspace dspace-api - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 org.dspace dspace-api test-jar - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 test org.dspace.modules additions - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 org.dspace dspace-sword - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 org.dspace dspace-swordv2 - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 org.dspace dspace-oai - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 org.dspace dspace-services - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 org.dspace dspace-server-webapp test-jar - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 test org.dspace dspace-rdf - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 org.dspace dspace-iiif - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 org.dspace dspace-server-webapp - cris-2023.02.02-SNAPSHOT + cris-2023.02.02 @@ -2037,7 +2037,7 @@ scm:git:git@github.com:4Science/DSpace.git scm:git:git@github.com:4Science/DSpace.git git@github.com:4Science/DSpace.git - dspace-cris-2023.02.02-SNAPSHOT + dspace-cris-2023.02.02